diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 6c53304a9ee..5b1cf48df68 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -94,7 +94,7 @@ jobs: - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v7 + uses: dawidd6/action-download-artifact@v8 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/frontend @@ -105,7 +105,7 @@ jobs: - name: Download nightly wheels of intents if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v7 + uses: dawidd6/action-download-artifact@v8 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/intents-package @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fb07d60da3b..6527a09e15f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1273,7 +1273,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.0 with: fail_ci_if_error: true flags: full-suite @@ -1411,7 +1411,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v5.1.2 + uses: codecov/codecov-action@v5.3.0 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7c9a076de64..ee7fad4bb4e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.28.1 + uses: github/codeql-action/init@v3.28.4 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.28.1 + uses: github/codeql-action/analyze@v3.28.4 with: category: "/language:python" diff --git a/.strict-typing b/.strict-typing index 46b14f22660..1c0456a745d 100644 --- a/.strict-typing +++ b/.strict-typing @@ -237,6 +237,7 @@ homeassistant.components.homeassistant_green.* homeassistant.components.homeassistant_hardware.* homeassistant.components.homeassistant_sky_connect.* homeassistant.components.homeassistant_yellow.* +homeassistant.components.homee.* homeassistant.components.homekit.* homeassistant.components.homekit_controller homeassistant.components.homekit_controller.alarm_control_panel @@ -262,6 +263,7 @@ homeassistant.components.image_processing.* homeassistant.components.image_upload.* homeassistant.components.imap.* homeassistant.components.imgw_pib.* +homeassistant.components.incomfort.* homeassistant.components.input_button.* homeassistant.components.input_select.* homeassistant.components.input_text.* @@ -307,6 +309,7 @@ homeassistant.components.logbook.* homeassistant.components.logger.* homeassistant.components.london_underground.* homeassistant.components.lookin.* +homeassistant.components.lovelace.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* homeassistant.components.manual.* diff 
--git a/CODEOWNERS b/CODEOWNERS index 3553297b851..f16b890d407 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -682,8 +682,6 @@ build.json @home-assistant/supervisor /homeassistant/components/iammeter/ @lewei50 /homeassistant/components/iaqualink/ @flz /tests/components/iaqualink/ @flz -/homeassistant/components/ibeacon/ @bdraco -/tests/components/ibeacon/ @bdraco /homeassistant/components/icloud/ @Quentame @nzapponi /tests/components/icloud/ @Quentame @nzapponi /homeassistant/components/idasen_desk/ @abmantis @@ -1410,8 +1408,8 @@ build.json @home-assistant/supervisor /homeassistant/components/solaredge_local/ @drobtravels @scheric /homeassistant/components/solarlog/ @Ernst79 @dontinelli /tests/components/solarlog/ @Ernst79 @dontinelli -/homeassistant/components/solax/ @squishykid -/tests/components/solax/ @squishykid +/homeassistant/components/solax/ @squishykid @Darsstar +/tests/components/solax/ @squishykid @Darsstar /homeassistant/components/soma/ @ratsept @sebfortier2288 /tests/components/soma/ @ratsept @sebfortier2288 /homeassistant/components/sonarr/ @ctalkington diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index f1f1835863b..d89a9595868 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -112,6 +112,11 @@ with contextlib.suppress(ImportError): # Ensure anyio backend is imported to avoid it being imported in the event loop from anyio._backends import _asyncio # noqa: F401 +with contextlib.suppress(ImportError): + # httpx will import trio if it is installed which does + # blocking I/O in the event loop. We want to avoid that. + import trio # noqa: F401 + if TYPE_CHECKING: from .runner import RuntimeConfig diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index 681f3f08555..f39511ad41a 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -26,5 +26,5 @@ "iot_class": "local_push", "loggers": ["aioacaia"], "quality_scale": "platinum", - "requirements": ["aioacaia==0.1.13"] + "requirements": ["aioacaia==0.1.14"] } diff --git a/homeassistant/components/airgradient/button.py b/homeassistant/components/airgradient/button.py index 32a9b5adedf..ea7b12062e8 100644 --- a/homeassistant/components/airgradient/button.py +++ b/homeassistant/components/airgradient/button.py @@ -18,7 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN from .coordinator import AirGradientCoordinator -from .entity import AirGradientEntity +from .entity import AirGradientEntity, exception_handler + +PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) @@ -100,6 +102,7 @@ class AirGradientButton(AirGradientEntity, ButtonEntity): self.entity_description = description self._attr_unique_id = f"{coordinator.serial_number}-{description.key}" + @exception_handler async def async_press(self) -> None: """Press the button.""" await self.entity_description.press_fn(self.coordinator.client) diff --git a/homeassistant/components/airgradient/config_flow.py b/homeassistant/components/airgradient/config_flow.py index a2f9440d376..fa3e77beeca 100644 --- a/homeassistant/components/airgradient/config_flow.py +++ b/homeassistant/components/airgradient/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Airgradient.""" +from collections.abc import Mapping from typing import Any from airgradient import ( @@ -11,7 +12,12 @@ from airgradient import ( from awesomeversion import AwesomeVersion import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_MODEL from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo @@ -95,10 +101,18 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id( current_measures.serial_number, raise_on_progress=False ) - self._abort_if_unique_id_configured() + if self.source == SOURCE_USER: + self._abort_if_unique_id_configured() + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() await self.set_configuration_source() - return self.async_create_entry( - title=current_measures.model, + if self.source == SOURCE_USER: + return self.async_create_entry( + title=current_measures.model, + data={CONF_HOST: user_input[CONF_HOST]}, + ) + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data={CONF_HOST: user_input[CONF_HOST]}, ) return self.async_show_form( @@ -106,3 +120,9 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_HOST): str}), errors=errors, ) + + async def async_step_reconfigure( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reconfiguration.""" + return await self.async_step_user() diff --git a/homeassistant/components/airgradient/coordinator.py b/homeassistant/components/airgradient/coordinator.py index 03d58645853..d2fc2a9de1b 100644 --- a/homeassistant/components/airgradient/coordinator.py +++ b/homeassistant/components/airgradient/coordinator.py @@ -55,7 +55,11 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]): measures = await self.client.get_current_measures() config = await self.client.get_config() except AirGradientError as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"error": str(error)}, + ) from error if measures.firmware_version != self._current_version: device_registry = dr.async_get(self.hass) device_entry = device_registry.async_get_device( diff --git a/homeassistant/components/airgradient/entity.py b/homeassistant/components/airgradient/entity.py index 588a799610b..51256051259 
100644 --- a/homeassistant/components/airgradient/entity.py +++ b/homeassistant/components/airgradient/entity.py @@ -1,7 +1,11 @@ """Base class for AirGradient entities.""" -from airgradient import get_model_name +from collections.abc import Callable, Coroutine +from typing import Any, Concatenate +from airgradient import AirGradientConnectionError, AirGradientError, get_model_name + +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -26,3 +30,31 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]): serial_number=coordinator.serial_number, sw_version=measures.firmware_version, ) + + +def exception_handler[_EntityT: AirGradientEntity, **_P]( + func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Decorate AirGradient calls to handle exceptions. + + A decorator that wraps the passed in function, catches AirGradient errors. + """ + + async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + try: + await func(self, *args, **kwargs) + except AirGradientConnectionError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(error)}, + ) from error + + except AirGradientError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unknown_error", + translation_placeholders={"error": str(error)}, + ) from error + + return handler diff --git a/homeassistant/components/airgradient/number.py b/homeassistant/components/airgradient/number.py index 7fd282ddd8b..4265215fa25 100644 --- a/homeassistant/components/airgradient/number.py +++ b/homeassistant/components/airgradient/number.py @@ -19,7 +19,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN from .coordinator import AirGradientCoordinator -from .entity import AirGradientEntity +from .entity import AirGradientEntity, exception_handler + +PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) @@ -121,6 +123,7 @@ class AirGradientNumber(AirGradientEntity, NumberEntity): """Return the state of the number.""" return self.entity_description.value_fn(self.coordinator.data.config) + @exception_handler async def async_set_native_value(self, value: float) -> None: """Set the selected value.""" await self.entity_description.set_value_fn(self.coordinator.client, int(value)) diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml index 43816401cdb..7a7f8d5ee1d 100644 --- a/homeassistant/components/airgradient/quality_scale.yaml +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -29,7 +29,7 @@ rules: unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: status: exempt @@ -38,7 +38,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: status: exempt comment: | @@ -68,9 +68,9 @@ rules: entity-device-class: done entity-disabled-by-default: done entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/airgradient/select.py b/homeassistant/components/airgradient/select.py index af56802d842..8c15102ad3a 100644 --- a/homeassistant/components/airgradient/select.py +++ b/homeassistant/components/airgradient/select.py @@ -19,7 +19,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN, PM_STANDARD, PM_STANDARD_REVERSE from .coordinator import AirGradientCoordinator -from .entity import AirGradientEntity +from .entity import AirGradientEntity, exception_handler + +PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) @@ -216,6 +218,7 @@ class AirGradientSelect(AirGradientEntity, SelectEntity): """Return the state of the select.""" return self.entity_description.value_fn(self.coordinator.data.config) + @exception_handler async def async_select_option(self, option: str) -> None: """Change the selected option.""" await self.entity_description.set_value_fn(self.coordinator.client, option) diff --git a/homeassistant/components/airgradient/sensor.py b/homeassistant/components/airgradient/sensor.py index 273ba20d6b7..3b20b31f923 100644 --- a/homeassistant/components/airgradient/sensor.py +++ b/homeassistant/components/airgradient/sensor.py @@ -35,6 +35,8 @@ from .const import PM_STANDARD, PM_STANDARD_REVERSE from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class AirGradientMeasurementSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/airgradient/strings.json b/homeassistant/components/airgradient/strings.json index f3f78ea8fc9..4cf3a6a34ea 100644 --- a/homeassistant/components/airgradient/strings.json +++ b/homeassistant/components/airgradient/strings.json @@ -17,7 +17,9 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "invalid_version": "This firmware version is unsupported. Please upgrade the firmware of the device to at least version 3.1.1." + "invalid_version": "This firmware version is unsupported. Please upgrade the firmware of the device to at least version 3.1.1.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "Please ensure you reconfigure against the same device." }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -165,5 +167,16 @@ "name": "Post data to Airgradient" } } + }, + "exceptions": { + "communication_error": { + "message": "An error occurred while communicating with the Airgradient device: {error}" + }, + "unknown_error": { + "message": "An unknown error occurred while communicating with the Airgradient device: {error}" + }, + "update_error": { + "message": "An error occurred while communicating with the Airgradient device: {error}" + } } } diff --git a/homeassistant/components/airgradient/switch.py b/homeassistant/components/airgradient/switch.py index 329f704e755..55835fa30a6 100644 --- a/homeassistant/components/airgradient/switch.py +++ b/homeassistant/components/airgradient/switch.py @@ -20,7 +20,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN from .coordinator import AirGradientCoordinator -from .entity import AirGradientEntity +from .entity import AirGradientEntity, exception_handler + +PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) @@ -99,11 +101,13 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity): """Return the state of the switch.""" return self.entity_description.value_fn(self.coordinator.data.config) + @exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" await self.entity_description.set_value_fn(self.coordinator.client, True) await self.coordinator.async_request_refresh() + @exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.entity_description.set_value_fn(self.coordinator.client, False) diff --git a/homeassistant/components/airgradient/update.py b/homeassistant/components/airgradient/update.py index 7c040524243..12cec65f791 100644 --- a/homeassistant/components/airgradient/update.py +++ b/homeassistant/components/airgradient/update.py @@ -11,6 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AirGradientConfigEntry, AirGradientCoordinator from .entity import AirGradientEntity +PARALLEL_UPDATES = 1 SCAN_INTERVAL = timedelta(hours=1) diff --git a/homeassistant/components/airnow/coordinator.py b/homeassistant/components/airnow/coordinator.py index 32185080d25..9434d368dbe 100644 --- a/homeassistant/components/airnow/coordinator.py +++ b/homeassistant/components/airnow/coordinator.py @@ -21,7 +21,6 @@ from .const import ( ATTR_API_CAT_DESCRIPTION, ATTR_API_CAT_LEVEL, ATTR_API_CATEGORY, - ATTR_API_PM25, ATTR_API_POLLUTANT, ATTR_API_REPORT_DATE, ATTR_API_REPORT_HOUR, @@ -91,18 +90,16 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): max_aqi_desc = obv[ATTR_API_CATEGORY][ATTR_API_CAT_DESCRIPTION] max_aqi_poll = pollutant - # Copy other data from PM2.5 Value - if obv[ATTR_API_AQI_PARAM] == ATTR_API_PM25: - # Copy Report Details - data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE] - data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR] - data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ] + # Copy Report Details + data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE] + data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR] + data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ] - # Copy Station Details - data[ATTR_API_STATE] = obv[ATTR_API_STATE] - data[ATTR_API_STATION] = obv[ATTR_API_STATION] - data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE] - data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE] + # Copy Station Details + data[ATTR_API_STATE] = obv[ATTR_API_STATE] + data[ATTR_API_STATION] = obv[ATTR_API_STATION] + data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE] + data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE] # Store Overall AQI data[ATTR_API_AQI] = max_aqi diff --git a/homeassistant/components/airzone/__init__.py b/homeassistant/components/airzone/__init__.py index 39e4f73aa38..aa168dce858 100644 --- a/homeassistant/components/airzone/__init__.py +++ b/homeassistant/components/airzone/__init__.py @@ -86,7 +86,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b options = ConnectionOptions( entry.data[CONF_HOST], entry.data[CONF_PORT], - entry.data.get(CONF_ID, DEFAULT_SYSTEM_ID), + entry.data[CONF_ID], ) airzone = AirzoneLocalApi(aiohttp_client.async_get_clientsession(hass), 
options) @@ -120,3 +120,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> bool: + """Migrate an old entry.""" + if entry.version == 1 and entry.minor_version < 2: + # Add missing CONF_ID + system_id = entry.data.get(CONF_ID, DEFAULT_SYSTEM_ID) + new_data = entry.data.copy() + new_data[CONF_ID] = system_id + hass.config_entries.async_update_entry( + entry, + data=new_data, + minor_version=2, + ) + + _LOGGER.info( + "Migration to configuration version %s.%s successful", + entry.version, + entry.minor_version, + ) + + return True diff --git a/homeassistant/components/airzone/config_flow.py b/homeassistant/components/airzone/config_flow.py index b0a87dd4e57..c4088e950e9 100644 --- a/homeassistant/components/airzone/config_flow.py +++ b/homeassistant/components/airzone/config_flow.py @@ -44,6 +44,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN): _discovered_ip: str | None = None _discovered_mac: str | None = None + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,6 +54,9 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} if user_input is not None: + if CONF_ID not in user_input: + user_input[CONF_ID] = DEFAULT_SYSTEM_ID + self._async_abort_entries_match(user_input) airzone = AirzoneLocalApi( @@ -60,7 +64,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN): ConnectionOptions( user_input[CONF_HOST], user_input[CONF_PORT], - user_input.get(CONF_ID, DEFAULT_SYSTEM_ID), + user_input[CONF_ID], ), ) @@ -84,6 +88,9 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN): ) title = f"Airzone {user_input[CONF_HOST]}:{user_input[CONF_PORT]}" + if user_input[CONF_ID] != DEFAULT_SYSTEM_ID: + title += f" #{user_input[CONF_ID]}" + return self.async_create_entry(title=title, data=user_input) return self.async_show_form( diff --git a/homeassistant/components/anthropic/manifest.json b/homeassistant/components/anthropic/manifest.json index 7d51c458e4d..b5cbb36c034 100644 --- a/homeassistant/components/anthropic/manifest.json +++ b/homeassistant/components/anthropic/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/anthropic", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["anthropic==0.31.2"] + "requirements": ["anthropic==0.44.0"] } diff --git a/homeassistant/components/apcupsd/coordinator.py b/homeassistant/components/apcupsd/coordinator.py index 768e9605967..1ae12d8c4b0 100644 --- a/homeassistant/components/apcupsd/coordinator.py +++ b/homeassistant/components/apcupsd/coordinator.py @@ -44,7 +44,10 @@ class APCUPSdData(dict[str, str]): @property def serial_no(self) -> str | None: """Return the unique serial number of the UPS, if available.""" - return self.get("SERIALNO") + sn = self.get("SERIALNO") + # We had user reports that some UPS models simply return "Blank" as serial number, in + # which case we fall back to `None` to indicate that it is actually not available. 
+ return None if sn == "Blank" else sn class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]): diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index bcfa95463d1..8edd6cf0f2b 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -320,6 +320,7 @@ class BackupSchedule: time: dt.time | None = None cron_event: CronSim | None = field(init=False, default=None) next_automatic_backup: datetime | None = field(init=False, default=None) + next_automatic_backup_additional = False @callback def apply( @@ -378,6 +379,14 @@ class BackupSchedule: # add a day to the next time to avoid scheduling at the same time again self.cron_event = CronSim(cron_pattern, now + timedelta(days=1)) + # Compare the computed next time with the next time from the cron pattern + # to determine if an additional backup has been scheduled + cron_event_configured = CronSim(cron_pattern, now) + next_configured_time = next(cron_event_configured) + self.next_automatic_backup_additional = next_time < next_configured_time + else: + self.next_automatic_backup_additional = False + async def _create_backup(now: datetime) -> None: """Create backup.""" manager.remove_next_backup_event = None diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 672dd5ebb13..70fc568c05c 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -61,6 +61,7 @@ async def handle_info( "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, "next_automatic_backup": manager.config.data.schedule.next_automatic_backup, + "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional, }, ) @@ -329,7 +330,8 @@ async def handle_config_info( { "config": config | { - "next_automatic_backup": manager.config.data.schedule.next_automatic_backup + "next_automatic_backup": manager.config.data.schedule.next_automatic_backup, + "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional, } }, ) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index b5aa6cfa12f..22f8aa8fdb8 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -16,11 +16,11 @@ "quality_scale": "internal", "requirements": [ "bleak==0.22.3", - "bleak-retry-connector==3.7.0", - "bluetooth-adapters==0.21.0", + "bleak-retry-connector==3.8.0", + "bluetooth-adapters==0.21.1", "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.22.0", "dbus-fast==2.30.2", - "habluetooth==3.9.2" + "habluetooth==3.12.0" ] } diff --git a/homeassistant/components/bosch_shc/__init__.py b/homeassistant/components/bosch_shc/__init__.py index 9a00029412d..2871bc52450 100644 --- a/homeassistant/components/bosch_shc/__init__.py +++ b/homeassistant/components/bosch_shc/__init__.py @@ -12,13 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr -from .const import ( - CONF_SSL_CERTIFICATE, - CONF_SSL_KEY, - DATA_POLLING_HANDLER, - DATA_SESSION, - DOMAIN, -) +from .const import CONF_SSL_CERTIFICATE, CONF_SSL_KEY, DOMAIN PLATFORMS = [ Platform.BINARY_SENSOR, @@ -30,7 +24,10 @@ 
PLATFORMS = [ _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type BoschConfigEntry = ConfigEntry[SHCSession] + + +async def async_setup_entry(hass: HomeAssistant, entry: BoschConfigEntry) -> bool: """Set up Bosch SHC from a config entry.""" data = entry.data @@ -53,10 +50,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if shc_info.updateState.name == "UPDATE_AVAILABLE": _LOGGER.warning("Please check for software updates in the Bosch Smart Home App") - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = { - DATA_SESSION: session, - } + entry.runtime_data = session device_registry = dr.async_get(hass) device_registry.async_get_or_create( @@ -76,23 +70,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.async_add_executor_job(session.stop_polling) await hass.async_add_executor_job(session.start_polling) - hass.data[DOMAIN][entry.entry_id][DATA_POLLING_HANDLER] = ( + entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_polling) ) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BoschConfigEntry) -> bool: """Unload a config entry.""" - session: SHCSession = hass.data[DOMAIN][entry.entry_id][DATA_SESSION] + await hass.async_add_executor_job(entry.runtime_data.stop_polling) - hass.data[DOMAIN][entry.entry_id][DATA_POLLING_HANDLER]() - hass.data[DOMAIN][entry.entry_id].pop(DATA_POLLING_HANDLER) - await hass.async_add_executor_job(session.stop_polling) - - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bosch_shc/binary_sensor.py b/homeassistant/components/bosch_shc/binary_sensor.py index 342a3e3e417..dd0f31ea6f9 100644 --- a/homeassistant/components/bosch_shc/binary_sensor.py +++ b/homeassistant/components/bosch_shc/binary_sensor.py @@ -2,28 +2,27 @@ from __future__ import annotations -from boschshcpy import SHCBatteryDevice, SHCSession, SHCShutterContact +from boschshcpy import SHCBatteryDevice, SHCShutterContact from boschshcpy.device import SHCDevice from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_SESSION, DOMAIN +from . 
import BoschConfigEntry from .entity import SHCEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BoschConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the SHC binary sensor platform.""" - session: SHCSession = hass.data[DOMAIN][config_entry.entry_id][DATA_SESSION] + session = config_entry.runtime_data entities: list[BinarySensorEntity] = [ ShutterContactSensor( diff --git a/homeassistant/components/bosch_shc/const.py b/homeassistant/components/bosch_shc/const.py index ccb1f2094cb..07ec3b7da85 100644 --- a/homeassistant/components/bosch_shc/const.py +++ b/homeassistant/components/bosch_shc/const.py @@ -6,7 +6,4 @@ CONF_SHC_KEY = "bosch_shc-key.pem" CONF_SSL_CERTIFICATE = "ssl_certificate" CONF_SSL_KEY = "ssl_key" -DATA_SESSION = "session" -DATA_POLLING_HANDLER = "polling_handler" - DOMAIN = "bosch_shc" diff --git a/homeassistant/components/bosch_shc/cover.py b/homeassistant/components/bosch_shc/cover.py index 5377f0c6a8f..55d6bfc35de 100644 --- a/homeassistant/components/bosch_shc/cover.py +++ b/homeassistant/components/bosch_shc/cover.py @@ -2,7 +2,7 @@ from typing import Any -from boschshcpy import SHCSession, SHCShutterControl +from boschshcpy import SHCShutterControl from homeassistant.components.cover import ( ATTR_POSITION, @@ -10,22 +10,20 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_SESSION, DOMAIN +from . import BoschConfigEntry from .entity import SHCEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BoschConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the SHC cover platform.""" - - session: SHCSession = hass.data[DOMAIN][config_entry.entry_id][DATA_SESSION] + session = config_entry.runtime_data async_add_entities( ShutterControlCover( diff --git a/homeassistant/components/bosch_shc/sensor.py b/homeassistant/components/bosch_shc/sensor.py index 28f23cd9765..6408e21654e 100644 --- a/homeassistant/components/bosch_shc/sensor.py +++ b/homeassistant/components/bosch_shc/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from boschshcpy import SHCSession from boschshcpy.device import SHCDevice from homeassistant.components.sensor import ( @@ -15,7 +14,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, @@ -27,7 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DATA_SESSION, DOMAIN +from . 
import BoschConfigEntry from .entity import SHCEntity @@ -127,11 +125,11 @@ SENSOR_DESCRIPTIONS: dict[str, SHCSensorEntityDescription] = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BoschConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the SHC sensor platform.""" - session: SHCSession = hass.data[DOMAIN][config_entry.entry_id][DATA_SESSION] + session = config_entry.runtime_data entities: list[SensorEntity] = [ SHCSensor( diff --git a/homeassistant/components/bosch_shc/switch.py b/homeassistant/components/bosch_shc/switch.py index 58370a120f2..76b1da3e534 100644 --- a/homeassistant/components/bosch_shc/switch.py +++ b/homeassistant/components/bosch_shc/switch.py @@ -9,7 +9,6 @@ from boschshcpy import ( SHCCamera360, SHCCameraEyes, SHCLightSwitch, - SHCSession, SHCSmartPlug, SHCSmartPlugCompact, ) @@ -20,13 +19,12 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DATA_SESSION, DOMAIN +from . import BoschConfigEntry from .entity import SHCEntity @@ -80,11 +78,11 @@ SWITCH_TYPES: dict[str, SHCSwitchEntityDescription] = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BoschConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the SHC switch platform.""" - session: SHCSession = hass.data[DOMAIN][config_entry.entry_id][DATA_SESSION] + session = config_entry.runtime_data entities: list[SwitchEntity] = [ SHCSwitch( diff --git a/homeassistant/components/canary/__init__.py b/homeassistant/components/canary/__init__.py index f879c308a88..a28c37580ce 100644 --- a/homeassistant/components/canary/__init__.py +++ b/homeassistant/components/canary/__init__.py @@ -11,7 +11,7 @@ from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -20,13 +20,11 @@ from homeassistant.helpers.typing import ConfigType from .const import ( CONF_FFMPEG_ARGUMENTS, - DATA_COORDINATOR, - DATA_UNDO_UPDATE_LISTENER, DEFAULT_FFMPEG_ARGUMENTS, DEFAULT_TIMEOUT, DOMAIN, ) -from .coordinator import CanaryDataUpdateCoordinator +from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator _LOGGER: Final = logging.getLogger(__name__) @@ -59,8 +57,6 @@ PLATFORMS: Final[list[Platform]] = [ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Canary integration.""" - hass.data.setdefault(DOMAIN, {}) - if hass.config_entries.async_entries(DOMAIN): return True @@ -90,7 +86,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: CanaryConfigEntry) -> bool: """Set up Canary from a config entry.""" if not entry.options: options = { @@ -107,38 +103,29 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Unable to connect to Canary service: %s", str(error)) raise ConfigEntryNotReady from error - coordinator = CanaryDataUpdateCoordinator(hass, api=canary_api) + coordinator = CanaryDataUpdateCoordinator(hass, entry, api=canary_api) await coordinator.async_config_entry_first_refresh() - undo_listener = entry.add_update_listener(_async_update_listener) + entry.async_on_unload(entry.add_update_listener(_async_update_listener)) - hass.data[DOMAIN][entry.entry_id] = { - DATA_COORDINATOR: coordinator, - DATA_UNDO_UPDATE_LISTENER: undo_listener, - } + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: CanaryConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_LISTENER]() - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener(hass: HomeAssistant, entry: CanaryConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) -def _get_canary_api_instance(entry: ConfigEntry) -> Api: +def _get_canary_api_instance(entry: CanaryConfigEntry) -> Api: """Initialize a new instance of CanaryApi.""" return Api( entry.data[CONF_USERNAME], diff --git a/homeassistant/components/canary/alarm_control_panel.py b/homeassistant/components/canary/alarm_control_panel.py index 69600e4bbc7..443944da8c3 100644 --- a/homeassistant/components/canary/alarm_control_panel.py +++ b/homeassistant/components/canary/alarm_control_panel.py @@ -12,24 +12,20 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DATA_COORDINATOR, DOMAIN -from .coordinator import CanaryDataUpdateCoordinator +from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CanaryConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Canary alarm control panels based on a config entry.""" - coordinator: CanaryDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id][ - DATA_COORDINATOR - ] + coordinator = entry.runtime_data alarms = [ CanaryAlarm(coordinator, location) for location_id, location in coordinator.data["locations"].items() diff --git a/homeassistant/components/canary/camera.py b/homeassistant/components/canary/camera.py index a56d1ebc3de..8f4a01c9968 100644 --- a/homeassistant/components/canary/camera.py +++ b/homeassistant/components/canary/camera.py @@ -18,7 +18,6 @@ from homeassistant.components.camera import ( Camera, ) from homeassistant.components.ffmpeg import FFmpegManager, get_ffmpeg_manager -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv 
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream @@ -27,14 +26,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .const import ( - CONF_FFMPEG_ARGUMENTS, - DATA_COORDINATOR, - DEFAULT_FFMPEG_ARGUMENTS, - DOMAIN, - MANUFACTURER, -) -from .coordinator import CanaryDataUpdateCoordinator +from .const import CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS, DOMAIN, MANUFACTURER +from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator FORCE_CAMERA_REFRESH_INTERVAL: Final = timedelta(minutes=15) @@ -54,13 +47,11 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CanaryConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Canary sensors based on a config entry.""" - coordinator: CanaryDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id][ - DATA_COORDINATOR - ] + coordinator = entry.runtime_data ffmpeg_arguments: str = entry.options.get( CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS ) diff --git a/homeassistant/components/canary/const.py b/homeassistant/components/canary/const.py index 210da35c7c1..9b9229c3ac3 100644 --- a/homeassistant/components/canary/const.py +++ b/homeassistant/components/canary/const.py @@ -9,10 +9,6 @@ MANUFACTURER: Final = "Canary Connect, Inc" # Configuration CONF_FFMPEG_ARGUMENTS: Final = "ffmpeg_arguments" -# Data -DATA_COORDINATOR: Final = "coordinator" -DATA_UNDO_UPDATE_LISTENER: Final = "undo_update_listener" - # Defaults DEFAULT_FFMPEG_ARGUMENTS: Final = "-pred 1" DEFAULT_TIMEOUT: Final = 10 diff --git a/homeassistant/components/canary/coordinator.py b/homeassistant/components/canary/coordinator.py index d58d1da0f79..7c90074f81a 100644 --- a/homeassistant/components/canary/coordinator.py +++ b/homeassistant/components/canary/coordinator.py @@ -11,6 +11,7 @@ from canary.api import Api from canary.model import Location, Reading from requests.exceptions import ConnectTimeout, HTTPError +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -20,10 +21,15 @@ from .model import CanaryData _LOGGER = logging.getLogger(__name__) +type CanaryConfigEntry = ConfigEntry[CanaryDataUpdateCoordinator] + + class CanaryDataUpdateCoordinator(DataUpdateCoordinator[CanaryData]): """Class to manage fetching Canary data.""" - def __init__(self, hass: HomeAssistant, *, api: Api) -> None: + def __init__( + self, hass: HomeAssistant, config_entry: CanaryConfigEntry, *, api: Api + ) -> None: """Initialize global Canary data updater.""" self.canary = api update_interval = timedelta(seconds=30) @@ -31,6 +37,7 @@ class CanaryDataUpdateCoordinator(DataUpdateCoordinator[CanaryData]): super().__init__( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=update_interval, ) diff --git a/homeassistant/components/canary/sensor.py b/homeassistant/components/canary/sensor.py index 9aab4698bf3..22f3eada2cb 100644 --- a/homeassistant/components/canary/sensor.py +++ b/homeassistant/components/canary/sensor.py @@ -7,7 +7,6 @@ from typing import Final from canary.model import Device, Location, SensorType from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( 
PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, @@ -18,8 +17,8 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DATA_COORDINATOR, DOMAIN, MANUFACTURER -from .coordinator import CanaryDataUpdateCoordinator +from .const import DOMAIN, MANUFACTURER +from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator type SensorTypeItem = tuple[ str, str | None, str | None, SensorDeviceClass | None, list[str] @@ -64,13 +63,11 @@ STATE_AIR_QUALITY_VERY_ABNORMAL: Final = "very_abnormal" async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CanaryConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Canary sensors based on a config entry.""" - coordinator: CanaryDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id][ - DATA_COORDINATOR - ] + coordinator = entry.runtime_data sensors: list[CanarySensor] = [] for location in coordinator.data["locations"].values(): diff --git a/homeassistant/components/ccm15/__init__.py b/homeassistant/components/ccm15/__init__.py index a35568047ad..eae5d095ce7 100644 --- a/homeassistant/components/ccm15/__init__.py +++ b/homeassistant/components/ccm15/__init__.py @@ -2,34 +2,30 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import CCM15Coordinator +from .coordinator import CCM15ConfigEntry, CCM15Coordinator PLATFORMS: list[Platform] = [Platform.CLIMATE] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: CCM15ConfigEntry) -> bool: """Set up Midea ccm15 AC Controller from a config entry.""" coordinator = CCM15Coordinator( hass, + entry, entry.data[CONF_HOST], entry.data[CONF_PORT], ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: CCM15ConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ccm15/climate.py b/homeassistant/components/ccm15/climate.py index 3db8c3e1016..099b91ec02c 100644 --- a/homeassistant/components/ccm15/climate.py +++ b/homeassistant/components/ccm15/climate.py @@ -17,7 +17,6 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -25,18 +24,18 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONST_CMD_FAN_MAP, CONST_CMD_STATE_MAP, DOMAIN -from .coordinator import CCM15Coordinator +from .coordinator import 
CCM15ConfigEntry, CCM15Coordinator _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CCM15ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up all climate.""" - coordinator: CCM15Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data ac_data: CCM15DeviceState = coordinator.data entities = [ diff --git a/homeassistant/components/ccm15/coordinator.py b/homeassistant/components/ccm15/coordinator.py index cd3b313f700..03a59aa3f24 100644 --- a/homeassistant/components/ccm15/coordinator.py +++ b/homeassistant/components/ccm15/coordinator.py @@ -7,6 +7,7 @@ from ccm15 import CCM15Device, CCM15DeviceState, CCM15SlaveDevice import httpx from homeassistant.components.climate import HVACMode +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -19,15 +20,20 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +type CCM15ConfigEntry = ConfigEntry[CCM15Coordinator] + class CCM15Coordinator(DataUpdateCoordinator[CCM15DeviceState]): """Class to coordinate multiple CCM15Climate devices.""" - def __init__(self, hass: HomeAssistant, host: str, port: int) -> None: + def __init__( + self, hass: HomeAssistant, entry: CCM15ConfigEntry, host: str, port: int + ) -> None: """Initialize the coordinator.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=host, update_interval=datetime.timedelta(seconds=DEFAULT_INTERVAL), ) diff --git a/homeassistant/components/ccm15/diagnostics.py b/homeassistant/components/ccm15/diagnostics.py index 08cc239e972..c259e7f35c9 100644 --- a/homeassistant/components/ccm15/diagnostics.py +++ b/homeassistant/components/ccm15/diagnostics.py @@ -4,18 +4,16 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import CCM15Coordinator +from .coordinator import CCM15ConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: CCM15ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: CCM15Coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data return { str(device_id): { diff --git a/homeassistant/components/cloudflare/__init__.py b/homeassistant/components/cloudflare/__init__.py index bd27be71d18..f8fbac396a6 100644 --- a/homeassistant/components/cloudflare/__init__.py +++ b/homeassistant/components/cloudflare/__init__.py @@ -74,9 +74,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async_track_time_interval(hass, update_records, update_interval) ) - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = {} - hass.services.async_register(DOMAIN, SERVICE_UPDATE_RECORDS, update_records_service) return True @@ -84,7 +81,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Cloudflare config entry.""" - hass.data[DOMAIN].pop(entry.entry_id) return True diff --git a/homeassistant/components/coinbase/__init__.py b/homeassistant/components/coinbase/__init__.py index 6aa33a7c14d..a29154d9c1b 100644 --- 
a/homeassistant/components/coinbase/__init__.py +++ b/homeassistant/components/coinbase/__init__.py @@ -37,7 +37,6 @@ from .const import ( CONF_CURRENCIES, CONF_EXCHANGE_BASE, CONF_EXCHANGE_RATES, - DOMAIN, ) _LOGGER = logging.getLogger(__name__) @@ -45,33 +44,29 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) +type CoinbaseConfigEntry = ConfigEntry[CoinbaseData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) -> bool: """Set up Coinbase from a config entry.""" instance = await hass.async_add_executor_job(create_and_update_instance, entry) entry.async_on_unload(entry.add_update_listener(update_listener)) - hass.data.setdefault(DOMAIN, {}) - - hass.data[DOMAIN][entry.entry_id] = instance + entry.runtime_data = instance await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData: +def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData: """Create and update a Coinbase Data instance.""" if "organizations" not in entry.data[CONF_API_KEY]: client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN]) @@ -87,7 +82,9 @@ def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData: return instance -async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def update_listener( + hass: HomeAssistant, config_entry: CoinbaseConfigEntry +) -> None: """Handle options update.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 8b7b4b9e313..2b58f2b2f37 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -11,18 +11,13 @@ from coinbase.wallet.client import Client as LegacyClient from coinbase.wallet.error import AuthenticationError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv -from . import get_accounts +from . 
import CoinbaseConfigEntry, get_accounts from .const import ( ACCOUNT_IS_VAULT, API_ACCOUNT_CURRENCY, @@ -83,10 +78,12 @@ async def validate_api(hass: HomeAssistant, data): return {"title": user, "api_version": api_version} -async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, options): +async def validate_options( + hass: HomeAssistant, config_entry: CoinbaseConfigEntry, options +): """Validate the requested resources are provided by API.""" - client = hass.data[DOMAIN][config_entry.entry_id].client + client = config_entry.runtime_data.client accounts = await hass.async_add_executor_job( get_accounts, client, config_entry.data.get("api_version", "v2") @@ -155,7 +152,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: CoinbaseConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler() diff --git a/homeassistant/components/coinbase/diagnostics.py b/homeassistant/components/coinbase/diagnostics.py index 674ce9dca28..f391b1a14f5 100644 --- a/homeassistant/components/coinbase/diagnostics.py +++ b/homeassistant/components/coinbase/diagnostics.py @@ -3,12 +3,11 @@ from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_ID from homeassistant.core import HomeAssistant -from . import CoinbaseData -from .const import API_ACCOUNT_AMOUNT, API_RESOURCE_PATH, CONF_TITLE, DOMAIN +from . import CoinbaseConfigEntry +from .const import API_ACCOUNT_AMOUNT, API_RESOURCE_PATH, CONF_TITLE TO_REDACT = { API_ACCOUNT_AMOUNT, @@ -21,15 +20,13 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: CoinbaseConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - instance: CoinbaseData = hass.data[DOMAIN][entry.entry_id] - return async_redact_data( { "entry": entry.as_dict(), - "accounts": instance.accounts, + "accounts": entry.runtime_data.accounts, }, TO_REDACT, ) diff --git a/homeassistant/components/coinbase/sensor.py b/homeassistant/components/coinbase/sensor.py index d3f3c81fb0c..37509160247 100644 --- a/homeassistant/components/coinbase/sensor.py +++ b/homeassistant/components/coinbase/sensor.py @@ -5,12 +5,11 @@ from __future__ import annotations import logging from homeassistant.components.sensor import SensorEntity, SensorStateClass -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import CoinbaseData +from . 
import CoinbaseConfigEntry, CoinbaseData from .const import ( ACCOUNT_IS_VAULT, API_ACCOUNT_AMOUNT, @@ -45,11 +44,11 @@ ATTRIBUTION = "Data provided by coinbase.com" async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CoinbaseConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Coinbase sensor platform.""" - instance: CoinbaseData = hass.data[DOMAIN][config_entry.entry_id] + instance = config_entry.runtime_data entities: list[SensorEntity] = [] diff --git a/homeassistant/components/comelit/__init__.py b/homeassistant/components/comelit/__init__.py index 12f28ef206d..60a4e40140d 100644 --- a/homeassistant/components/comelit/__init__.py +++ b/homeassistant/components/comelit/__init__.py @@ -2,12 +2,16 @@ from aiocomelit.const import BRIDGE -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE, Platform from homeassistant.core import HomeAssistant -from .const import DEFAULT_PORT, DOMAIN -from .coordinator import ComelitBaseCoordinator, ComelitSerialBridge, ComelitVedoSystem +from .const import DEFAULT_PORT +from .coordinator import ( + ComelitBaseCoordinator, + ComelitConfigEntry, + ComelitSerialBridge, + ComelitVedoSystem, +) BRIDGE_PLATFORMS = [ Platform.CLIMATE, @@ -24,13 +28,14 @@ VEDO_PLATFORMS = [ ] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> bool: """Set up Comelit platform.""" coordinator: ComelitBaseCoordinator if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE: coordinator = ComelitSerialBridge( hass, + entry, entry.data[CONF_HOST], entry.data.get(CONF_PORT, DEFAULT_PORT), entry.data[CONF_PIN], @@ -39,6 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: else: coordinator = ComelitVedoSystem( hass, + entry, entry.data[CONF_HOST], entry.data.get(CONF_PORT, DEFAULT_PORT), entry.data[CONF_PIN], @@ -47,14 +53,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, platforms) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> bool: """Unload a config entry.""" if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE: @@ -62,10 +68,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: else: platforms = VEDO_PLATFORMS - coordinator: ComelitBaseCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data if unload_ok := await hass.config_entries.async_unload_platforms(entry, platforms): await coordinator.api.logout() await coordinator.api.close() - hass.data[DOMAIN].pop(entry.entry_id) return unload_ok diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index b3bd6664bf8..f694c2b392b 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import cast from aiocomelit.api import ComelitVedoAreaObject from aiocomelit.const import ALARM_AREAS, AlarmAreaState @@ -13,13 +14,11 @@ from 
homeassistant.components.alarm_control_panel import ( AlarmControlPanelState, CodeFormat, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitVedoSystem +from .coordinator import ComelitConfigEntry, ComelitVedoSystem _LOGGER = logging.getLogger(__name__) @@ -48,12 +47,12 @@ ALARM_AREA_ARMED_STATUS: dict[str, int] = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Comelit VEDO system alarm control panel devices.""" - coordinator: ComelitVedoSystem = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) async_add_entities( ComelitAlarmEntity(coordinator, device, config_entry.entry_id) diff --git a/homeassistant/components/comelit/binary_sensor.py b/homeassistant/components/comelit/binary_sensor.py index 30b642584f8..fa51e0b1fda 100644 --- a/homeassistant/components/comelit/binary_sensor.py +++ b/homeassistant/components/comelit/binary_sensor.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import cast + from aiocomelit import ComelitVedoZoneObject from aiocomelit.const import ALARM_ZONES @@ -9,23 +11,21 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitVedoSystem +from .coordinator import ComelitConfigEntry, ComelitVedoSystem async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit VEDO presence sensors.""" - coordinator: ComelitVedoSystem = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) async_add_entities( ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id) diff --git a/homeassistant/components/comelit/climate.py b/homeassistant/components/comelit/climate.py index 6dc7c7e26d9..1baa777bf99 100644 --- a/homeassistant/components/comelit/climate.py +++ b/homeassistant/components/comelit/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from enum import StrEnum -from typing import Any +from typing import Any, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import CLIMATE @@ -15,14 +15,12 @@ from homeassistant.components.climate import ( HVACMode, UnitOfTemperature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitSerialBridge +from .coordinator import ComelitConfigEntry, ComelitSerialBridge class ClimaComelitMode(StrEnum): @@ -72,12 +70,12 @@ MODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = { async def async_setup_entry( hass: 
HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit climates.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) async_add_entities( ComelitClimateEntity(coordinator, device, config_entry.entry_id) diff --git a/homeassistant/components/comelit/coordinator.py b/homeassistant/components/comelit/coordinator.py index 807f389a6d3..fcb149b21d6 100644 --- a/homeassistant/components/comelit/coordinator.py +++ b/homeassistant/components/comelit/coordinator.py @@ -23,15 +23,19 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import _LOGGER, DOMAIN +type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator] + class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Base coordinator for Comelit Devices.""" _hw_version: str - config_entry: ConfigEntry + config_entry: ComelitConfigEntry api: ComelitCommonApi - def __init__(self, hass: HomeAssistant, device: str, host: str) -> None: + def __init__( + self, hass: HomeAssistant, entry: ComelitConfigEntry, device: str, host: str + ) -> None: """Initialize the scanner.""" self._device = device @@ -40,13 +44,14 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]): super().__init__( hass=hass, logger=_LOGGER, + config_entry=entry, name=f"{DOMAIN}-{host}-coordinator", update_interval=timedelta(seconds=5), ) device_registry = dr.async_get(self.hass) device_registry.async_get_or_create( - config_entry_id=self.config_entry.entry_id, - identifiers={(DOMAIN, self.config_entry.entry_id)}, + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.entry_id)}, model=device, name=f"{device} ({self._host})", manufacturer="Comelit", @@ -98,10 +103,17 @@ class ComelitSerialBridge(ComelitBaseCoordinator): _hw_version = "20003101" api: ComeliteSerialBridgeApi - def __init__(self, hass: HomeAssistant, host: str, port: int, pin: int) -> None: + def __init__( + self, + hass: HomeAssistant, + entry: ComelitConfigEntry, + host: str, + port: int, + pin: int, + ) -> None: """Initialize the scanner.""" self.api = ComeliteSerialBridgeApi(host, port, pin) - super().__init__(hass, BRIDGE, host) + super().__init__(hass, entry, BRIDGE, host) async def _async_update_system_data(self) -> dict[str, Any]: """Specific method for updating data.""" @@ -114,10 +126,17 @@ class ComelitVedoSystem(ComelitBaseCoordinator): _hw_version = "VEDO IP" api: ComelitVedoApi - def __init__(self, hass: HomeAssistant, host: str, port: int, pin: int) -> None: + def __init__( + self, + hass: HomeAssistant, + entry: ComelitConfigEntry, + host: str, + port: int, + pin: int, + ) -> None: """Initialize the scanner.""" self.api = ComelitVedoApi(host, port, pin) - super().__init__(hass, VEDO, host) + super().__init__(hass, entry, VEDO, host) async def _async_update_system_data(self) -> dict[str, Any]: """Specific method for updating data.""" diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 5169217ebc5..abb84824621 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -2,30 +2,28 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON from homeassistant.components.cover import 
CoverDeviceClass, CoverEntity, CoverState -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitSerialBridge +from .coordinator import ComelitConfigEntry, ComelitSerialBridge async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit covers.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) async_add_entities( ComelitCoverEntity(coordinator, device, config_entry.entry_id) diff --git a/homeassistant/components/comelit/diagnostics.py b/homeassistant/components/comelit/diagnostics.py index afa57831eae..547735f3879 100644 --- a/homeassistant/components/comelit/diagnostics.py +++ b/homeassistant/components/comelit/diagnostics.py @@ -12,22 +12,20 @@ from aiocomelit import ( from aiocomelit.const import BRIDGE from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PIN, CONF_TYPE from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import ComelitBaseCoordinator +from .coordinator import ComelitConfigEntry TO_REDACT = {CONF_PIN} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: ComelitConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: ComelitBaseCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data dev_list: list[dict[str, Any]] = [] dev_type_list: list[dict[int, Any]] = [] diff --git a/homeassistant/components/comelit/humidifier.py b/homeassistant/components/comelit/humidifier.py index e7857535c78..d8058074c16 100644 --- a/homeassistant/components/comelit/humidifier.py +++ b/homeassistant/components/comelit/humidifier.py @@ -3,7 +3,7 @@ from __future__ import annotations from enum import StrEnum -from typing import Any +from typing import Any, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import CLIMATE @@ -16,14 +16,13 @@ from homeassistant.components.humidifier import ( HumidifierEntity, HumidifierEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import ComelitSerialBridge +from .coordinator import ComelitConfigEntry, ComelitSerialBridge class HumidifierComelitMode(StrEnum): @@ -55,12 +54,12 @@ MODE_TO_ACTION: dict[str, HumidifierComelitCommand] = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit humidifiers.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) entities: list[ComelitHumidifierEntity] = [] for device in 
coordinator.data[CLIMATE].values(): diff --git a/homeassistant/components/comelit/light.py b/homeassistant/components/comelit/light.py index bb5eb5fa160..9736c9ac2a0 100644 --- a/homeassistant/components/comelit/light.py +++ b/homeassistant/components/comelit/light.py @@ -2,29 +2,27 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import LIGHT, STATE_OFF, STATE_ON from homeassistant.components.light import ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitSerialBridge +from .coordinator import ComelitConfigEntry, ComelitSerialBridge async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit lights.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) async_add_entities( ComelitLightEntity(coordinator, device, config_entry.entry_id) diff --git a/homeassistant/components/comelit/sensor.py b/homeassistant/components/comelit/sensor.py index a86d49d73e9..efb2418244e 100644 --- a/homeassistant/components/comelit/sensor.py +++ b/homeassistant/components/comelit/sensor.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Final +from typing import Final, cast from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject from aiocomelit.const import ALARM_ZONES, BRIDGE, OTHER, AlarmZoneState @@ -12,15 +12,13 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_TYPE, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitSerialBridge, ComelitVedoSystem +from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem SENSOR_BRIDGE_TYPES: Final = ( SensorEntityDescription( @@ -43,7 +41,7 @@ SENSOR_VEDO_TYPES: Final = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit sensors.""" @@ -56,12 +54,12 @@ async def async_setup_entry( async def async_setup_bridge_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit Bridge sensors.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) entities: list[ComelitBridgeSensorEntity] = [] for device in coordinator.data[OTHER].values(): @@ -76,12 +74,12 @@ async def async_setup_bridge_entry( async def async_setup_vedo_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit VEDO sensors.""" - coordinator: 
ComelitVedoSystem = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) entities: list[ComelitVedoSensorEntity] = [] for device in coordinator.data[ALARM_ZONES].values(): diff --git a/homeassistant/components/comelit/switch.py b/homeassistant/components/comelit/switch.py index 68ba934adb6..26d3b81ebde 100644 --- a/homeassistant/components/comelit/switch.py +++ b/homeassistant/components/comelit/switch.py @@ -2,29 +2,27 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import IRRIGATION, OTHER, STATE_OFF, STATE_ON from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import ComelitSerialBridge +from .coordinator import ComelitConfigEntry, ComelitSerialBridge async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ComelitConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Comelit switches.""" - coordinator: ComelitSerialBridge = hass.data[DOMAIN][config_entry.entry_id] + coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) entities: list[ComelitSwitchEntity] = [] entities.extend( diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 1d79709adf8..bb815698941 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -798,36 +798,13 @@ class DefaultAgent(ConversationEntity): intent_response: intent.IntentResponse, recognize_result: RecognizeResult, ) -> str: - # Make copies of the states here so we can add translated names for responses. - matched = [ - state_copy - for state in intent_response.matched_states - if (state_copy := core.State.from_dict(state.as_dict())) - ] - unmatched = [ - state_copy - for state in intent_response.unmatched_states - if (state_copy := core.State.from_dict(state.as_dict())) - ] - all_states = matched + unmatched - domains = {state.domain for state in all_states} - translations = await translation.async_get_translations( - self.hass, language, "entity_component", domains - ) - - # Use translated state names - for state in all_states: - device_class = state.attributes.get("device_class", "_") - key = f"component.{state.domain}.entity_component.{device_class}.state.{state.state}" - state.state = translations.get(key, state.state) - # Get first matched or unmatched state. # This is available in the response template as "state". 
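A minimal sketch (not part of the patch) of the runtime_data pattern the Coinbase, Comelit and Coolmaster hunks above apply: the coordinator is stored on the config entry behind a typed ConfigEntry alias instead of in hass.data[DOMAIN]. MyCoordinator, MyConfigEntry and PLATFORMS are hypothetical names used only for illustration.

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.SENSOR]

# runtime_data is typed through the alias, so platforms see a typed coordinator.
type MyConfigEntry = ConfigEntry[MyCoordinator]


class MyCoordinator(DataUpdateCoordinator[dict[str, str]]):
    """Placeholder coordinator; a real one would poll a device or cloud API."""

    def __init__(self, hass: HomeAssistant, entry: MyConfigEntry) -> None:
        """Initialize the coordinator, passing the config entry explicitly."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name="my_domain",
            update_interval=timedelta(seconds=30),
        )

    async def _async_update_data(self) -> dict[str, str]:
        """Fetch data; stubbed out for this sketch."""
        return {"status": "ok"}


async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Set up the integration from a config entry."""
    coordinator = MyCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator  # replaces hass.data[DOMAIN][entry.entry_id]
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Unload a config entry; no manual hass.data pop is required."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

Because runtime_data lives on the entry object, it is discarded automatically when the entry is unloaded or removed, which is why the unload functions in the hunks above shrink to a single async_unload_platforms call.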
state1: core.State | None = None if intent_response.matched_states: - state1 = matched[0] + state1 = intent_response.matched_states[0] elif intent_response.unmatched_states: - state1 = unmatched[0] + state1 = intent_response.unmatched_states[0] # Render response template speech_slots = { @@ -849,11 +826,13 @@ class DefaultAgent(ConversationEntity): "query": { # Entity states that matched the query (e.g, "on") "matched": [ - template.TemplateState(self.hass, state) for state in matched + template.TemplateState(self.hass, state) + for state in intent_response.matched_states ], # Entity states that did not match the query "unmatched": [ - template.TemplateState(self.hass, state) for state in unmatched + template.TemplateState(self.hass, state) + for state in intent_response.unmatched_states ], }, } @@ -1506,12 +1485,6 @@ def _get_match_error_response( # Entity is not in correct state assert constraints.states state = next(iter(constraints.states)) - if constraints.domains: - # Translate if domain is available - domain = next(iter(constraints.domains)) - state = translation.async_translate_state( - hass, state, domain, None, None, None - ) return ErrorKey.ENTITY_WRONG_STATE, {"state": state} diff --git a/homeassistant/components/coolmaster/__init__.py b/homeassistant/components/coolmaster/__init__.py index 1f3f5a66380..5892ef091d9 100644 --- a/homeassistant/components/coolmaster/__init__.py +++ b/homeassistant/components/coolmaster/__init__.py @@ -2,18 +2,17 @@ from pycoolmasternet_async import CoolMasterNet -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONF_SWING_SUPPORT, DATA_COORDINATOR, DATA_INFO, DOMAIN -from .coordinator import CoolmasterDataUpdateCoordinator +from .const import CONF_SWING_SUPPORT +from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -> bool: """Set up Coolmaster from a config entry.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] @@ -38,21 +37,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady except OSError as error: raise ConfigEntryNotReady from error - coordinator = CoolmasterDataUpdateCoordinator(hass, coolmaster) - hass.data.setdefault(DOMAIN, {}) + coordinator = CoolmasterDataUpdateCoordinator(hass, entry, coolmaster, info) await coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = { - DATA_INFO: info, - DATA_COORDINATOR: coordinator, - } + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -> bool: """Unload a Coolmaster config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/coolmaster/binary_sensor.py 
b/homeassistant/components/coolmaster/binary_sensor.py index ba54a073f0a..ab2718b9352 100644 --- a/homeassistant/components/coolmaster/binary_sensor.py +++ b/homeassistant/components/coolmaster/binary_sensor.py @@ -7,26 +7,23 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_COORDINATOR, DATA_INFO, DOMAIN +from .coordinator import CoolmasterConfigEntry from .entity import CoolmasterEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CoolmasterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CoolMasterNet binary_sensor platform.""" - info = hass.data[DOMAIN][config_entry.entry_id][DATA_INFO] - coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR] + coordinator = config_entry.runtime_data async_add_entities( - CoolmasterCleanFilter(coordinator, unit_id, info) - for unit_id in coordinator.data + CoolmasterCleanFilter(coordinator, unit_id) for unit_id in coordinator.data ) diff --git a/homeassistant/components/coolmaster/button.py b/homeassistant/components/coolmaster/button.py index d958346614c..5463566d1ef 100644 --- a/homeassistant/components/coolmaster/button.py +++ b/homeassistant/components/coolmaster/button.py @@ -3,26 +3,23 @@ from __future__ import annotations from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_COORDINATOR, DATA_INFO, DOMAIN +from .coordinator import CoolmasterConfigEntry from .entity import CoolmasterEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CoolmasterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CoolMasterNet button platform.""" - info = hass.data[DOMAIN][config_entry.entry_id][DATA_INFO] - coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR] + coordinator = config_entry.runtime_data async_add_entities( - CoolmasterResetFilter(coordinator, unit_id, info) - for unit_id in coordinator.data + CoolmasterResetFilter(coordinator, unit_id) for unit_id in coordinator.data ) diff --git a/homeassistant/components/coolmaster/climate.py b/homeassistant/components/coolmaster/climate.py index 29be416d57e..cd1659e1666 100644 --- a/homeassistant/components/coolmaster/climate.py +++ b/homeassistant/components/coolmaster/climate.py @@ -12,13 +12,13 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import CONF_SUPPORTED_MODES, DATA_COORDINATOR, DATA_INFO, DOMAIN +from .const import CONF_SUPPORTED_MODES +from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator from .entity import CoolmasterEntity CM_TO_HA_STATE = { @@ -38,15 +38,16 @@ _LOGGER = 
logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CoolmasterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CoolMasterNet climate platform.""" - info = hass.data[DOMAIN][config_entry.entry_id][DATA_INFO] - coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR] - supported_modes = config_entry.data.get(CONF_SUPPORTED_MODES) + coordinator = config_entry.runtime_data + supported_modes: list[str] = config_entry.data[CONF_SUPPORTED_MODES] async_add_entities( - CoolmasterClimate(coordinator, unit_id, info, supported_modes) + CoolmasterClimate( + coordinator, unit_id, [HVACMode(mode) for mode in supported_modes] + ) for unit_id in coordinator.data ) @@ -56,9 +57,14 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity): _attr_name = None - def __init__(self, coordinator, unit_id, info, supported_modes): + def __init__( + self, + coordinator: CoolmasterDataUpdateCoordinator, + unit_id: str, + supported_modes: list[HVACMode], + ) -> None: """Initialize the climate device.""" - super().__init__(coordinator, unit_id, info) + super().__init__(coordinator, unit_id) self._attr_hvac_modes = supported_modes self._attr_unique_id = unit_id diff --git a/homeassistant/components/coolmaster/const.py b/homeassistant/components/coolmaster/const.py index 1fa46e20ee9..9dd7ed3a444 100644 --- a/homeassistant/components/coolmaster/const.py +++ b/homeassistant/components/coolmaster/const.py @@ -1,8 +1,5 @@ """Constants for the Coolmaster integration.""" -DATA_INFO = "info" -DATA_COORDINATOR = "coordinator" - DOMAIN = "coolmaster" DEFAULT_PORT = 10102 diff --git a/homeassistant/components/coolmaster/coordinator.py b/homeassistant/components/coolmaster/coordinator.py index 54d69b1c540..b2c96ca12a4 100644 --- a/homeassistant/components/coolmaster/coordinator.py +++ b/homeassistant/components/coolmaster/coordinator.py @@ -1,8 +1,15 @@ """DataUpdateCoordinator for coolmaster integration.""" +from __future__ import annotations + import logging +from pycoolmasternet_async import CoolMasterNet +from pycoolmasternet_async.coolmasternet import CoolMasterNetUnit + from homeassistant.components.climate import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -10,21 +17,34 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -class CoolmasterDataUpdateCoordinator(DataUpdateCoordinator): +type CoolmasterConfigEntry = ConfigEntry[CoolmasterDataUpdateCoordinator] + + +class CoolmasterDataUpdateCoordinator( + DataUpdateCoordinator[dict[str, CoolMasterNetUnit]] +): """Class to manage fetching Coolmaster data.""" - def __init__(self, hass, coolmaster): + def __init__( + self, + hass: HomeAssistant, + entry: CoolmasterConfigEntry, + coolmaster: CoolMasterNet, + info: dict[str, str], + ) -> None: """Initialize global Coolmaster data updater.""" self._coolmaster = coolmaster + self.info = info super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) - async def _async_update_data(self): + async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]: """Fetch data from Coolmaster.""" try: return await self._coolmaster.status() diff --git a/homeassistant/components/coolmaster/entity.py b/homeassistant/components/coolmaster/entity.py index 73bd1e13a26..7d7bd8e62ba 100644 --- 
a/homeassistant/components/coolmaster/entity.py +++ b/homeassistant/components/coolmaster/entity.py @@ -1,7 +1,5 @@ """Base entity for Coolmaster integration.""" -from pycoolmasternet_async.coolmasternet import CoolMasterNetUnit - from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -19,18 +17,17 @@ class CoolmasterEntity(CoordinatorEntity[CoolmasterDataUpdateCoordinator]): self, coordinator: CoolmasterDataUpdateCoordinator, unit_id: str, - info: dict[str, str], ) -> None: """Initiate CoolmasterEntity.""" super().__init__(coordinator) self._unit_id: str = unit_id - self._unit: CoolMasterNetUnit = coordinator.data[self._unit_id] + self._unit = coordinator.data[self._unit_id] self._attr_device_info: DeviceInfo = DeviceInfo( identifiers={(DOMAIN, unit_id)}, manufacturer="CoolAutomation", model="CoolMasterNet", name=unit_id, - sw_version=info["version"], + sw_version=coordinator.info["version"], ) if hasattr(self, "entity_description"): self._attr_unique_id: str = f"{unit_id}-{self.entity_description.key}" diff --git a/homeassistant/components/coolmaster/sensor.py b/homeassistant/components/coolmaster/sensor.py index 4c2a09b1ce5..2b835565bae 100644 --- a/homeassistant/components/coolmaster/sensor.py +++ b/homeassistant/components/coolmaster/sensor.py @@ -3,26 +3,23 @@ from __future__ import annotations from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DATA_COORDINATOR, DATA_INFO, DOMAIN +from .coordinator import CoolmasterConfigEntry from .entity import CoolmasterEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: CoolmasterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CoolMasterNet sensor platform.""" - info = hass.data[DOMAIN][config_entry.entry_id][DATA_INFO] - coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR] + coordinator = config_entry.runtime_data async_add_entities( - CoolmasterCleanFilter(coordinator, unit_id, info) - for unit_id in coordinator.data + CoolmasterCleanFilter(coordinator, unit_id) for unit_id in coordinator.data ) diff --git a/homeassistant/components/daikin/__init__.py b/homeassistant/components/daikin/__init__.py index c58578071ee..0eaffa39ee9 100644 --- a/homeassistant/components/daikin/__init__.py +++ b/homeassistant/components/daikin/__init__.py @@ -9,7 +9,6 @@ from aiohttp import ClientConnectionError from pydaikin.daikin_base import Appliance from pydaikin.factory import DaikinFactory -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_HOST, @@ -23,8 +22,8 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC -from .const import DOMAIN, KEY_MAC, TIMEOUT -from .coordinator import DaikinCoordinator +from .const import KEY_MAC, TIMEOUT +from .coordinator import DaikinConfigEntry, DaikinCoordinator _LOGGER = logging.getLogger(__name__) @@ -32,7 +31,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] 
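A second sketch, assuming the MyCoordinator/MyConfigEntry placeholders above live in a coordinator.py module, of how the platform-level hunks (Comelit, Coolmaster, Daikin) now obtain the coordinator: a direct read of config_entry.runtime_data instead of a hass.data lookup. MySensorEntity is likewise a placeholder, not code from the patch.

from homeassistant.components.sensor import SensorEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import MyConfigEntry, MyCoordinator  # as defined in the sketch above


class MySensorEntity(CoordinatorEntity[MyCoordinator], SensorEntity):
    """Sensor backed by the coordinator kept in runtime_data."""

    def __init__(self, coordinator: MyCoordinator, key: str) -> None:
        """Initialize the sensor for one coordinator data key."""
        super().__init__(coordinator)
        self._key = key
        self._attr_unique_id = key

    @property
    def native_value(self) -> str | None:
        """Return the latest value published by the coordinator."""
        return self.coordinator.data.get(self._key)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: MyConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up sensors straight from config_entry.runtime_data."""
    coordinator = config_entry.runtime_data  # already typed via MyConfigEntry
    # When the alias is typed to a base coordinator, narrow it as Comelit does,
    # e.g. coordinator = cast(ComelitSerialBridge, config_entry.runtime_data).
    async_add_entities(MySensorEntity(coordinator, key) for key in coordinator.data)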
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bool: """Establish connection with Daikin.""" conf = entry.data # For backwards compat, set unique ID @@ -58,29 +57,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.debug("ClientConnectionError to %s", host) raise ConfigEntryNotReady from err - coordinator = DaikinCoordinator(hass, device) + coordinator = DaikinCoordinator(hass, entry, device) await coordinator.async_config_entry_first_refresh() await async_migrate_unique_id(hass, entry, device) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - if not hass.data[DOMAIN]: - hass.data.pop(DOMAIN) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_migrate_unique_id( - hass: HomeAssistant, config_entry: ConfigEntry, device: Appliance + hass: HomeAssistant, config_entry: DaikinConfigEntry, device: Appliance ) -> None: """Migrate old entry.""" dev_reg = dr.async_get(hass) diff --git a/homeassistant/components/daikin/climate.py b/homeassistant/components/daikin/climate.py index 751683656f2..06ee0a03860 100644 --- a/homeassistant/components/daikin/climate.py +++ b/homeassistant/components/daikin/climate.py @@ -19,12 +19,10 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, @@ -32,7 +30,7 @@ from .const import ( ATTR_STATE_ON, ATTR_TARGET_TEMPERATURE, ) -from .coordinator import DaikinCoordinator +from .coordinator import DaikinConfigEntry, DaikinCoordinator from .entity import DaikinEntity _LOGGER = logging.getLogger(__name__) @@ -83,10 +81,12 @@ DAIKIN_ATTR_ADVANCED = "adv" async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: DaikinConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Daikin climate based on config_entry.""" - daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) + daikin_api = entry.runtime_data async_add_entities([DaikinClimate(daikin_api)]) diff --git a/homeassistant/components/daikin/coordinator.py b/homeassistant/components/daikin/coordinator.py index 35d998b4ba2..8e1713af5b2 100644 --- a/homeassistant/components/daikin/coordinator.py +++ b/homeassistant/components/daikin/coordinator.py @@ -5,6 +5,7 @@ import logging from pydaikin.daikin_base import Appliance +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -12,15 +13,20 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +type DaikinConfigEntry = ConfigEntry[DaikinCoordinator] + class DaikinCoordinator(DataUpdateCoordinator[None]): """Class to manage fetching Daikin data.""" - def __init__(self, hass: HomeAssistant, device: Appliance) -> None: + def __init__( + self, hass: HomeAssistant, entry: DaikinConfigEntry, device: Appliance + ) -> None: """Initialize global Daikin data updater.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=device.values.get("name", DOMAIN), update_interval=timedelta(seconds=60), ) diff --git a/homeassistant/components/daikin/sensor.py b/homeassistant/components/daikin/sensor.py index d2d6ef02fc3..982aac1f3f2 100644 --- a/homeassistant/components/daikin/sensor.py +++ b/homeassistant/components/daikin/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, UnitOfEnergy, @@ -24,7 +23,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN as DAIKIN_DOMAIN from .const import ( ATTR_COMPRESSOR_FREQUENCY, ATTR_COOL_ENERGY, @@ -37,7 +35,7 @@ from .const import ( ATTR_TOTAL_ENERGY_TODAY, ATTR_TOTAL_POWER, ) -from .coordinator import DaikinCoordinator +from .coordinator import DaikinConfigEntry, DaikinCoordinator from .entity import DaikinEntity @@ -134,10 +132,12 @@ SENSOR_TYPES: tuple[DaikinSensorEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: DaikinConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Daikin climate based on config_entry.""" - daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id) + daikin_api = entry.runtime_data sensors = [ATTR_INSIDE_TEMPERATURE] if daikin_api.device.support_outside_temperature: sensors.append(ATTR_OUTSIDE_TEMPERATURE) diff --git a/homeassistant/components/daikin/switch.py b/homeassistant/components/daikin/switch.py index 669048ac45e..8a3a15d367f 100644 --- a/homeassistant/components/daikin/switch.py +++ b/homeassistant/components/daikin/switch.py @@ -5,12 +5,10 @@ from __future__ import annotations from typing import Any from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN -from .coordinator import DaikinCoordinator +from .coordinator import DaikinConfigEntry, DaikinCoordinator from .entity import DaikinEntity DAIKIN_ATTR_ADVANCED = "adv" @@ -19,10 +17,12 @@ DAIKIN_ATTR_MODE = "mode" async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: DaikinConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Daikin climate based on config_entry.""" - daikin_api: DaikinCoordinator = hass.data[DOMAIN][entry.entry_id] + daikin_api = entry.runtime_data switches: list[SwitchEntity] = [] if zones := daikin_api.device.zones: switches.extend( diff --git a/homeassistant/components/deconz/__init__.py b/homeassistant/components/deconz/__init__.py index 8007f3217d5..42c81e69740 100644 --- a/homeassistant/components/deconz/__init__.py +++ b/homeassistant/components/deconz/__init__.py @@ -9,12 +9,12 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -from .config_flow import get_master_hub from .const import CONF_MASTER_GATEWAY, DOMAIN, PLATFORMS from .deconz_event import async_setup_events, async_unload_events from .errors import AuthenticationRequired, CannotConnect from .hub import DeconzHub, get_deconz_api from .services import async_setup_services +from .util import get_master_hub CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -46,7 +46,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b hub = hass.data[DOMAIN][config_entry.entry_id] = DeconzHub(hass, config_entry, api) await hub.async_update_device_registry() - config_entry.add_update_listener(hub.async_config_entry_updated) + config_entry.async_on_unload( + config_entry.add_update_listener(hub.async_config_entry_updated) + ) await async_setup_events(hub) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) diff --git a/homeassistant/components/deconz/config_flow.py b/homeassistant/components/deconz/config_flow.py index 7f5fc96c022..41e45d53c76 100644 --- a/homeassistant/components/deconz/config_flow.py +++ b/homeassistant/components/deconz/config_flow.py @@ -27,7 +27,7 @@ from homeassistant.config_entries import ( OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant, callback +from homeassistant.core 
import callback from homeassistant.helpers import aiohttp_client from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.ssdp import ATTR_UPNP_SERIAL, SsdpServiceInfo @@ -51,15 +51,6 @@ CONF_SERIAL = "serial" CONF_MANUAL_INPUT = "Manually define gateway" -@callback -def get_master_hub(hass: HomeAssistant) -> DeconzHub: - """Return the gateway which is marked as master.""" - for hub in hass.data[DOMAIN].values(): - if hub.master: - return cast(DeconzHub, hub) - raise ValueError - - class DeconzFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a deCONZ config flow.""" diff --git a/homeassistant/components/deconz/services.py b/homeassistant/components/deconz/services.py index e10195d86bc..6127fe44308 100644 --- a/homeassistant/components/deconz/services.py +++ b/homeassistant/components/deconz/services.py @@ -12,9 +12,9 @@ from homeassistant.helpers import ( from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from homeassistant.util.read_only_dict import ReadOnlyDict -from .config_flow import get_master_hub from .const import CONF_BRIDGE_ID, DOMAIN, LOGGER from .hub import DeconzHub +from .util import get_master_hub DECONZ_SERVICES = "deconz_services" diff --git a/homeassistant/components/deconz/util.py b/homeassistant/components/deconz/util.py index 7c44280200d..bcf338b2d6d 100644 --- a/homeassistant/components/deconz/util.py +++ b/homeassistant/components/deconz/util.py @@ -2,9 +2,24 @@ from __future__ import annotations +from homeassistant.core import HomeAssistant, callback + +from .const import DOMAIN +from .hub import DeconzHub + def serial_from_unique_id(unique_id: str | None) -> str | None: """Get a device serial number from a unique ID, if possible.""" if not unique_id or unique_id.count(":") != 7: return None return unique_id.partition("-")[0] + + +@callback +def get_master_hub(hass: HomeAssistant) -> DeconzHub: + """Return the gateway which is marked as master.""" + hub: DeconzHub + for hub in hass.data[DOMAIN].values(): + if hub.master: + return hub + raise ValueError diff --git a/homeassistant/components/denonavr/__init__.py b/homeassistant/components/denonavr/__init__.py index 98b77a994f6..da2b601317a 100644 --- a/homeassistant/components/denonavr/__init__.py +++ b/homeassistant/components/denonavr/__init__.py @@ -12,7 +12,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er from homeassistant.helpers.httpx_client import get_async_client -from .config_flow import ( +from .const import ( CONF_SHOW_ALL_SOURCES, CONF_UPDATE_AUDYSSEY, CONF_USE_TELNET, @@ -24,21 +24,18 @@ from .config_flow import ( DEFAULT_USE_TELNET, DEFAULT_ZONE2, DEFAULT_ZONE3, - DOMAIN, ) from .receiver import ConnectDenonAVR -CONF_RECEIVER = "receiver" -UNDO_UPDATE_LISTENER = "undo_update_listener" PLATFORMS = [Platform.MEDIA_PLAYER] _LOGGER = logging.getLogger(__name__) +type DenonavrConfigEntry = ConfigEntry[DenonAVR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: DenonavrConfigEntry) -> bool: """Set up the denonavr components from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - # Connect to receiver connect_denonavr = ConnectDenonAVR( entry.data[CONF_HOST], @@ -56,12 +53,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady from ex receiver = connect_denonavr.receiver - undo_listener = 
entry.add_update_listener(update_listener) + entry.async_on_unload(entry.add_update_listener(update_listener)) - hass.data[DOMAIN][entry.entry_id] = { - CONF_RECEIVER: receiver, - UNDO_UPDATE_LISTENER: undo_listener, - } + entry.runtime_data = receiver await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) use_telnet = entry.options.get(CONF_USE_TELNET, DEFAULT_USE_TELNET) @@ -79,18 +73,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: DenonavrConfigEntry +) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms( config_entry, PLATFORMS ) if config_entry.options.get(CONF_USE_TELNET, DEFAULT_USE_TELNET): - receiver: DenonAVR = hass.data[DOMAIN][config_entry.entry_id][CONF_RECEIVER] + receiver = config_entry.runtime_data await receiver.async_telnet_disconnect() - hass.data[DOMAIN][config_entry.entry_id][UNDO_UPDATE_LISTENER]() - # Remove zone2 and zone3 entities if needed entity_registry = er.async_get(hass) entries = er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) @@ -105,12 +99,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> entity_registry.async_remove(entry.entity_id) _LOGGER.debug("Removing zone3 from DenonAvr") - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - return unload_ok -async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def update_listener( + hass: HomeAssistant, config_entry: DenonavrConfigEntry +) -> None: """Handle options update.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/denonavr/config_flow.py b/homeassistant/components/denonavr/config_flow.py index 9601b67081c..930d0e009ac 100644 --- a/homeassistant/components/denonavr/config_flow.py +++ b/homeassistant/components/denonavr/config_flow.py @@ -10,12 +10,7 @@ import denonavr from denonavr.exceptions import AvrNetworkError, AvrTimoutError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_TYPE from homeassistant.core import callback from homeassistant.helpers.httpx_client import get_async_client @@ -27,29 +22,30 @@ from homeassistant.helpers.service_info.ssdp import ( SsdpServiceInfo, ) +from . 
import DenonavrConfigEntry +from .const import ( + CONF_MANUFACTURER, + CONF_SERIAL_NUMBER, + CONF_SHOW_ALL_SOURCES, + CONF_UPDATE_AUDYSSEY, + CONF_USE_TELNET, + CONF_ZONE2, + CONF_ZONE3, + DEFAULT_SHOW_SOURCES, + DEFAULT_TIMEOUT, + DEFAULT_UPDATE_AUDYSSEY, + DEFAULT_USE_TELNET, + DEFAULT_ZONE2, + DEFAULT_ZONE3, + DOMAIN, +) from .receiver import ConnectDenonAVR _LOGGER = logging.getLogger(__name__) -DOMAIN = "denonavr" - SUPPORTED_MANUFACTURERS = ["Denon", "DENON", "DENON PROFESSIONAL", "Marantz"] IGNORED_MODELS = ["HEOS 1", "HEOS 3", "HEOS 5", "HEOS 7"] -CONF_SHOW_ALL_SOURCES = "show_all_sources" -CONF_ZONE2 = "zone2" -CONF_ZONE3 = "zone3" -CONF_MANUFACTURER = "manufacturer" -CONF_SERIAL_NUMBER = "serial_number" -CONF_UPDATE_AUDYSSEY = "update_audyssey" -CONF_USE_TELNET = "use_telnet" - -DEFAULT_SHOW_SOURCES = False -DEFAULT_TIMEOUT = 5 -DEFAULT_ZONE2 = False -DEFAULT_ZONE3 = False -DEFAULT_UPDATE_AUDYSSEY = False -DEFAULT_USE_TELNET = False DEFAULT_USE_TELNET_NEW_INSTALL = True CONFIG_SCHEMA = vol.Schema({vol.Optional(CONF_HOST): str}) @@ -118,7 +114,7 @@ class DenonAvrFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: DenonavrConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" return OptionsFlowHandler() diff --git a/homeassistant/components/denonavr/const.py b/homeassistant/components/denonavr/const.py new file mode 100644 index 00000000000..d28044ec018 --- /dev/null +++ b/homeassistant/components/denonavr/const.py @@ -0,0 +1,19 @@ +"""Constants for Denon AVR.""" + +DOMAIN = "denonavr" + + +CONF_SHOW_ALL_SOURCES = "show_all_sources" +CONF_ZONE2 = "zone2" +CONF_ZONE3 = "zone3" +CONF_MANUFACTURER = "manufacturer" +CONF_SERIAL_NUMBER = "serial_number" +CONF_UPDATE_AUDYSSEY = "update_audyssey" +CONF_USE_TELNET = "use_telnet" + +DEFAULT_SHOW_SOURCES = False +DEFAULT_TIMEOUT = 5 +DEFAULT_ZONE2 = False +DEFAULT_ZONE3 = False +DEFAULT_UPDATE_AUDYSSEY = False +DEFAULT_USE_TELNET = False diff --git a/homeassistant/components/denonavr/media_player.py b/homeassistant/components/denonavr/media_player.py index 03d1b00cfaf..818d530ddab 100644 --- a/homeassistant/components/denonavr/media_player.py +++ b/homeassistant/components/denonavr/media_player.py @@ -35,18 +35,16 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_MODEL +from homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_MODEL, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import CONF_RECEIVER -from .config_flow import ( +from . 
import DenonavrConfigEntry +from .const import ( CONF_MANUFACTURER, CONF_SERIAL_NUMBER, - CONF_TYPE, CONF_UPDATE_AUDYSSEY, DEFAULT_UPDATE_AUDYSSEY, DOMAIN, @@ -110,13 +108,12 @@ DENON_STATE_MAPPING = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: DenonavrConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the DenonAVR receiver from a config entry.""" entities = [] - data = hass.data[DOMAIN][config_entry.entry_id] - receiver = data[CONF_RECEIVER] + receiver = config_entry.runtime_data update_audyssey = config_entry.options.get( CONF_UPDATE_AUDYSSEY, DEFAULT_UPDATE_AUDYSSEY ) @@ -253,7 +250,7 @@ class DenonDevice(MediaPlayerEntity): self, receiver: DenonAVR, unique_id: str, - config_entry: ConfigEntry, + config_entry: DenonavrConfigEntry, update_audyssey: bool, ) -> None: """Initialize the device.""" diff --git a/homeassistant/components/dexcom/__init__.py b/homeassistant/components/dexcom/__init__.py index e93e8e66358..54722c8dade 100644 --- a/homeassistant/components/dexcom/__init__.py +++ b/homeassistant/components/dexcom/__init__.py @@ -1,24 +1,16 @@ """The Dexcom integration.""" -from datetime import timedelta -import logging +from pydexcom import AccountError, Dexcom, SessionError -from pydexcom import AccountError, Dexcom, GlucoseReading, SessionError - -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_SERVER, DOMAIN, PLATFORMS, SERVER_OUS - -_LOGGER = logging.getLogger(__name__) - -SCAN_INTERVAL = timedelta(seconds=180) +from .const import CONF_SERVER, PLATFORMS, SERVER_OUS +from .coordinator import DexcomConfigEntry, DexcomCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bool: """Set up Dexcom from a config entry.""" try: dexcom = await hass.async_add_executor_job( @@ -32,31 +24,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SessionError as error: raise ConfigEntryNotReady from error - async def async_update_data(): - try: - return await hass.async_add_executor_job(dexcom.get_current_glucose_reading) - except SessionError as error: - raise UpdateFailed(error) from error - - coordinator = DataUpdateCoordinator[GlucoseReading]( - hass, - _LOGGER, - config_entry=entry, - name=DOMAIN, - update_method=async_update_data, - update_interval=SCAN_INTERVAL, - ) + coordinator = DexcomCoordinator(hass, entry=entry, dexcom=dexcom) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DexcomConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/dexcom/coordinator.py b/homeassistant/components/dexcom/coordinator.py new file 
mode 100644 index 00000000000..a9e14def350 --- /dev/null +++ b/homeassistant/components/dexcom/coordinator.py @@ -0,0 +1,44 @@ +"""Coordinator for the Dexcom integration.""" + +from datetime import timedelta +import logging + +from pydexcom import Dexcom, GlucoseReading + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +_SCAN_INTERVAL = timedelta(seconds=180) + +type DexcomConfigEntry = ConfigEntry[DexcomCoordinator] + + +class DexcomCoordinator(DataUpdateCoordinator[GlucoseReading]): + """Dexcom Coordinator.""" + + def __init__( + self, + hass: HomeAssistant, + entry: DexcomConfigEntry, + dexcom: Dexcom, + ) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=DOMAIN, + update_interval=_SCAN_INTERVAL, + ) + self.dexcom = dexcom + + async def _async_update_data(self) -> GlucoseReading: + """Fetch data from API endpoint.""" + return await self.hass.async_add_executor_job( + self.dexcom.get_current_glucose_reading + ) diff --git a/homeassistant/components/dexcom/sensor.py b/homeassistant/components/dexcom/sensor.py index 850678e7ac9..cdb1894b675 100644 --- a/homeassistant/components/dexcom/sensor.py +++ b/homeassistant/components/dexcom/sensor.py @@ -2,20 +2,15 @@ from __future__ import annotations -from pydexcom import GlucoseReading - from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_USERNAME, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN +from .coordinator import DexcomConfigEntry, DexcomCoordinator TRENDS = { 1: "rising_quickly", @@ -30,11 +25,11 @@ TRENDS = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: DexcomConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Dexcom sensors.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data username = config_entry.data[CONF_USERNAME] async_add_entities( [ @@ -44,16 +39,14 @@ async def async_setup_entry( ) -class DexcomSensorEntity( - CoordinatorEntity[DataUpdateCoordinator[GlucoseReading]], SensorEntity -): +class DexcomSensorEntity(CoordinatorEntity[DexcomCoordinator], SensorEntity): """Base Dexcom sensor entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: DataUpdateCoordinator[GlucoseReading], + coordinator: DexcomCoordinator, username: str, entry_id: str, key: str, @@ -78,7 +71,7 @@ class DexcomGlucoseValueSensor(DexcomSensorEntity): def __init__( self, - coordinator: DataUpdateCoordinator, + coordinator: DexcomCoordinator, username: str, entry_id: str, ) -> None: @@ -101,7 +94,7 @@ class DexcomGlucoseTrendSensor(DexcomSensorEntity): _attr_options = list(TRENDS.values()) def __init__( - self, coordinator: DataUpdateCoordinator, username: str, entry_id: str + self, coordinator: DexcomCoordinator, username: str, entry_id: str ) -> None: """Initialize the sensor.""" 
super().__init__(coordinator, username, entry_id, "trend") diff --git a/homeassistant/components/directv/__init__.py b/homeassistant/components/directv/__init__.py index e59fa4e9d0d..274cc4cbf53 100644 --- a/homeassistant/components/directv/__init__.py +++ b/homeassistant/components/directv/__init__.py @@ -12,13 +12,14 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN - PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] SCAN_INTERVAL = timedelta(seconds=30) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type DirecTVConfigEntry = ConfigEntry[DIRECTV] + + +async def async_setup_entry(hass: HomeAssistant, entry: DirecTVConfigEntry) -> bool: """Set up DirecTV from a config entry.""" dtv = DIRECTV(entry.data[CONF_HOST], session=async_get_clientsession(hass)) @@ -27,18 +28,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except DIRECTVError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = dtv + entry.runtime_data = dtv await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DirecTVConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/directv/media_player.py b/homeassistant/components/directv/media_player.py index 6c4a40598de..8998e050a75 100644 --- a/homeassistant/components/directv/media_player.py +++ b/homeassistant/components/directv/media_player.py @@ -14,17 +14,16 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . import DirecTVConfigEntry from .const import ( ATTR_MEDIA_CURRENTLY_RECORDING, ATTR_MEDIA_RATING, ATTR_MEDIA_RECORDED, ATTR_MEDIA_START_TIME, - DOMAIN, ) from .entity import DIRECTVEntity @@ -55,11 +54,11 @@ SUPPORT_DTV_CLIENT = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DirecTVConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the DirecTV config entry.""" - dtv = hass.data[DOMAIN][entry.entry_id] + dtv = entry.runtime_data async_add_entities( ( diff --git a/homeassistant/components/directv/remote.py b/homeassistant/components/directv/remote.py index 5a77d90bd3c..dbaab5fa4e6 100644 --- a/homeassistant/components/directv/remote.py +++ b/homeassistant/components/directv/remote.py @@ -10,11 +10,10 @@ from typing import Any from directv import DIRECTV, DIRECTVError from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import DirecTVConfigEntry from .entity import DIRECTVEntity _LOGGER = logging.getLogger(__name__) @@ -24,11 +23,11 @@ SCAN_INTERVAL = timedelta(minutes=2) async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DirecTVConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Load DirecTV remote based on a config entry.""" - dtv = hass.data[DOMAIN][entry.entry_id] + dtv = entry.runtime_data async_add_entities( ( diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index adbb4198b9f..82541476a02 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.5"], + "requirements": ["async-upnp-client==0.43.0", "getmac==0.9.5"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index ac5bf3719e3..17fc3dc27e8 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,7 +7,7 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "requirements": ["async-upnp-client==0.42.0"], + "requirements": ["async-upnp-client==0.43.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/dormakaba_dkey/__init__.py b/homeassistant/components/dormakaba_dkey/__init__.py index b4304e75aab..0a00490f3d9 100644 --- a/homeassistant/components/dormakaba_dkey/__init__.py +++ b/homeassistant/components/dormakaba_dkey/__init__.py @@ -2,30 +2,24 @@ from __future__ import annotations -from datetime import timedelta -import logging - from py_dormakaba_dkey import DKEYLock -from py_dormakaba_dkey.errors import DKEY_EXCEPTIONS, NotAssociated from py_dormakaba_dkey.models import AssociationData from homeassistant.components import bluetooth from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONF_ASSOCIATION_DATA, DOMAIN, UPDATE_SECONDS -from .models import DormakabaDkeyData +from .const import CONF_ASSOCIATION_DATA +from .coordinator import DormakabaDkeyConfigEntry, DormakabaDkeyCoordinator PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.LOCK, Platform.SENSOR] -_LOGGER = logging.getLogger(__name__) - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: DormakabaDkeyConfigEntry +) -> bool: """Set up Dormakaba dKey from a config entry.""" address: str = entry.data[CONF_ADDRESS] ble_device = bluetooth.async_ble_device_from_address(hass, address.upper(), True) @@ -56,29 +50,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) ) - async def _async_update() 
-> None: - """Update the device state.""" - try: - await lock.update() - await lock.disconnect() - except NotAssociated as ex: - raise ConfigEntryAuthFailed("Not associated") from ex - except DKEY_EXCEPTIONS as ex: - raise UpdateFailed(str(ex)) from ex - - coordinator = DataUpdateCoordinator( - hass, - _LOGGER, - config_entry=entry, - name=lock.name, - update_method=_async_update, - update_interval=timedelta(seconds=UPDATE_SECONDS), - ) + coordinator = DormakabaDkeyCoordinator(hass, entry, lock) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = DormakabaDkeyData( - lock, coordinator - ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -89,13 +64,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop) ) + entry.async_on_unload(coordinator.lock.disconnect) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: DormakabaDkeyConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - data: DormakabaDkeyData = hass.data[DOMAIN].pop(entry.entry_id) - await data.lock.disconnect() - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/dormakaba_dkey/binary_sensor.py b/homeassistant/components/dormakaba_dkey/binary_sensor.py index a8574443e35..56b991bf908 100644 --- a/homeassistant/components/dormakaba_dkey/binary_sensor.py +++ b/homeassistant/components/dormakaba_dkey/binary_sensor.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from py_dormakaba_dkey import DKEYLock from py_dormakaba_dkey.commands import DoorPosition, Notifications, UnlockStatus from homeassistant.components.binary_sensor import ( @@ -13,14 +12,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN +from .coordinator import DormakabaDkeyConfigEntry, DormakabaDkeyCoordinator from .entity import DormakabaDkeyEntity -from .models import DormakabaDkeyData @dataclass(frozen=True, kw_only=True) @@ -48,13 +44,13 @@ BINARY_SENSOR_DESCRIPTIONS = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DormakabaDkeyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the binary sensor platform for Dormakaba dKey.""" - data: DormakabaDkeyData = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( - DormakabaDkeyBinarySensor(data.coordinator, data.lock, description) + DormakabaDkeyBinarySensor(coordinator, description) for description in BINARY_SENSOR_DESCRIPTIONS ) @@ -67,16 +63,15 @@ class DormakabaDkeyBinarySensor(DormakabaDkeyEntity, BinarySensorEntity): def __init__( self, - coordinator: DataUpdateCoordinator[None], - lock: DKEYLock, + coordinator: DormakabaDkeyCoordinator, description: DormakabaDkeyBinarySensorDescription, ) -> None: """Initialize a Dormakaba 
dKey binary sensor.""" self.entity_description = description - self._attr_unique_id = f"{lock.address}_{description.key}" - super().__init__(coordinator, lock) + self._attr_unique_id = f"{coordinator.lock.address}_{description.key}" + super().__init__(coordinator) @callback def _async_update_attrs(self) -> None: """Handle updating _attr values.""" - self._attr_is_on = self.entity_description.is_on(self._lock.state) + self._attr_is_on = self.entity_description.is_on(self.coordinator.lock.state) diff --git a/homeassistant/components/dormakaba_dkey/coordinator.py b/homeassistant/components/dormakaba_dkey/coordinator.py new file mode 100644 index 00000000000..32f71ebf59d --- /dev/null +++ b/homeassistant/components/dormakaba_dkey/coordinator.py @@ -0,0 +1,50 @@ +"""Coordinator for the Dormakaba dKey integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from py_dormakaba_dkey import DKEYLock +from py_dormakaba_dkey.errors import DKEY_EXCEPTIONS, NotAssociated + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import UPDATE_SECONDS + +_LOGGER = logging.getLogger(__name__) + +type DormakabaDkeyConfigEntry = ConfigEntry[DormakabaDkeyCoordinator] + + +class DormakabaDkeyCoordinator(DataUpdateCoordinator[None]): + """DormakabaDkey coordinator.""" + + def __init__( + self, + hass: HomeAssistant, + entry: DormakabaDkeyConfigEntry, + lock: DKEYLock, + ) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=lock.name, + update_interval=timedelta(seconds=UPDATE_SECONDS), + ) + self.lock = lock + + async def _async_update_data(self) -> None: + """Update the device state.""" + try: + await self.lock.update() + await self.lock.disconnect() + except NotAssociated as ex: + raise ConfigEntryAuthFailed("Not associated") from ex + except DKEY_EXCEPTIONS as ex: + raise UpdateFailed(str(ex)) from ex diff --git a/homeassistant/components/dormakaba_dkey/entity.py b/homeassistant/components/dormakaba_dkey/entity.py index 756edccf02f..cc34a70014d 100644 --- a/homeassistant/components/dormakaba_dkey/entity.py +++ b/homeassistant/components/dormakaba_dkey/entity.py @@ -4,29 +4,25 @@ from __future__ import annotations import abc -from py_dormakaba_dkey import DKEYLock from py_dormakaba_dkey.commands import Notifications from homeassistant.core import callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import DormakabaDkeyCoordinator -class DormakabaDkeyEntity(CoordinatorEntity[DataUpdateCoordinator[None]]): +class DormakabaDkeyEntity(CoordinatorEntity[DormakabaDkeyCoordinator]): """Dormakaba dKey base entity.""" _attr_has_entity_name = True - def __init__( - self, coordinator: DataUpdateCoordinator[None], lock: DKEYLock - ) -> None: + def __init__(self, coordinator: DormakabaDkeyCoordinator) -> None: """Initialize a Dormakaba dKey entity.""" super().__init__(coordinator) - self._lock = lock + lock = coordinator.lock self._attr_device_info = DeviceInfo( name=lock.device_info.device_name or lock.device_info.device_id, model="MTL 
9291", @@ -53,5 +49,7 @@ class DormakabaDkeyEntity(CoordinatorEntity[DataUpdateCoordinator[None]]): async def async_added_to_hass(self) -> None: """Register callbacks.""" - self.async_on_remove(self._lock.register_callback(self._handle_state_update)) + self.async_on_remove( + self.coordinator.lock.register_callback(self._handle_state_update) + ) return await super().async_added_to_hass() diff --git a/homeassistant/components/dormakaba_dkey/lock.py b/homeassistant/components/dormakaba_dkey/lock.py index 5f475d37152..352e7cbe0ac 100644 --- a/homeassistant/components/dormakaba_dkey/lock.py +++ b/homeassistant/components/dormakaba_dkey/lock.py @@ -4,28 +4,23 @@ from __future__ import annotations from typing import Any -from py_dormakaba_dkey import DKEYLock from py_dormakaba_dkey.commands import UnlockStatus from homeassistant.components.lock import LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN +from .coordinator import DormakabaDkeyConfigEntry, DormakabaDkeyCoordinator from .entity import DormakabaDkeyEntity -from .models import DormakabaDkeyData async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DormakabaDkeyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the lock platform for Dormakaba dKey.""" - data: DormakabaDkeyData = hass.data[DOMAIN][entry.entry_id] - async_add_entities([DormakabaDkeyLock(data.coordinator, data.lock)]) + async_add_entities([DormakabaDkeyLock(entry.runtime_data)]) class DormakabaDkeyLock(DormakabaDkeyEntity, LockEntity): @@ -33,25 +28,23 @@ class DormakabaDkeyLock(DormakabaDkeyEntity, LockEntity): _attr_has_entity_name = True - def __init__( - self, coordinator: DataUpdateCoordinator[None], lock: DKEYLock - ) -> None: + def __init__(self, coordinator: DormakabaDkeyCoordinator) -> None: """Initialize a Dormakaba dKey lock.""" - self._attr_unique_id = lock.address - super().__init__(coordinator, lock) + self._attr_unique_id = coordinator.lock.address + super().__init__(coordinator) @callback def _async_update_attrs(self) -> None: """Handle updating _attr values.""" - self._attr_is_locked = self._lock.state.unlock_status in ( + self._attr_is_locked = self.coordinator.lock.state.unlock_status in ( UnlockStatus.LOCKED, UnlockStatus.SECURITY_LOCKED, ) async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - await self._lock.lock() + await self.coordinator.lock.lock() async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - await self._lock.unlock() + await self.coordinator.lock.unlock() diff --git a/homeassistant/components/dormakaba_dkey/models.py b/homeassistant/components/dormakaba_dkey/models.py deleted file mode 100644 index 23687e82334..00000000000 --- a/homeassistant/components/dormakaba_dkey/models.py +++ /dev/null @@ -1,17 +0,0 @@ -"""The Dormakaba dKey integration models.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from py_dormakaba_dkey import DKEYLock - -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator - - -@dataclass -class DormakabaDkeyData: - """Data for the Dormakaba dKey integration.""" - - lock: DKEYLock - coordinator: DataUpdateCoordinator[None] diff --git a/homeassistant/components/dormakaba_dkey/sensor.py b/homeassistant/components/dormakaba_dkey/sensor.py 
index e461ba1e44f..b1e941bc7e1 100644 --- a/homeassistant/components/dormakaba_dkey/sensor.py +++ b/homeassistant/components/dormakaba_dkey/sensor.py @@ -2,23 +2,18 @@ from __future__ import annotations -from py_dormakaba_dkey import DKEYLock - from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN +from .coordinator import DormakabaDkeyConfigEntry, DormakabaDkeyCoordinator from .entity import DormakabaDkeyEntity -from .models import DormakabaDkeyData BINARY_SENSOR_DESCRIPTIONS = ( SensorEntityDescription( @@ -32,13 +27,13 @@ BINARY_SENSOR_DESCRIPTIONS = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DormakabaDkeyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the lock platform for Dormakaba dKey.""" - data: DormakabaDkeyData = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( - DormakabaDkeySensor(data.coordinator, data.lock, description) + DormakabaDkeySensor(coordinator, description) for description in BINARY_SENSOR_DESCRIPTIONS ) @@ -50,16 +45,17 @@ class DormakabaDkeySensor(DormakabaDkeyEntity, SensorEntity): def __init__( self, - coordinator: DataUpdateCoordinator[None], - lock: DKEYLock, + coordinator: DormakabaDkeyCoordinator, description: SensorEntityDescription, ) -> None: """Initialize a Dormakaba dKey binary sensor.""" self.entity_description = description - self._attr_unique_id = f"{lock.address}_{description.key}" - super().__init__(coordinator, lock) + self._attr_unique_id = f"{coordinator.lock.address}_{description.key}" + super().__init__(coordinator) @callback def _async_update_attrs(self) -> None: """Handle updating _attr values.""" - self._attr_native_value = getattr(self._lock, self.entity_description.key) + self._attr_native_value = getattr( + self.coordinator.lock, self.entity_description.key + ) diff --git a/homeassistant/components/dunehd/__init__.py b/homeassistant/components/dunehd/__init__.py index 27e9e749472..302a7280128 100644 --- a/homeassistant/components/dunehd/__init__.py +++ b/homeassistant/components/dunehd/__init__.py @@ -10,29 +10,21 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: Final[list[Platform]] = [Platform.MEDIA_PLAYER] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type DuneHDConfigEntry = ConfigEntry[DuneHDPlayer] + + +async def async_setup_entry(hass: HomeAssistant, entry: DuneHDConfigEntry) -> bool: """Set up a config entry.""" - host: str = entry.data[CONF_HOST] - - player = DuneHDPlayer(host) - - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = player + entry.runtime_data = DuneHDPlayer(entry.data[CONF_HOST]) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DuneHDConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, 
PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/dunehd/media_player.py b/homeassistant/components/dunehd/media_player.py index ded23ea4669..db903cac2bf 100644 --- a/homeassistant/components/dunehd/media_player.py +++ b/homeassistant/components/dunehd/media_player.py @@ -15,11 +15,11 @@ from homeassistant.components.media_player import ( MediaType, async_process_play_media_url, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import DuneHDConfigEntry from .const import ATTR_MANUFACTURER, DEFAULT_NAME, DOMAIN CONF_SOURCES: Final = "sources" @@ -37,14 +37,14 @@ DUNEHD_PLAYER_SUPPORT: Final[MediaPlayerEntityFeature] = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: DuneHDConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Add Dune HD entities from a config_entry.""" - unique_id = entry.entry_id - - player: DuneHDPlayer = hass.data[DOMAIN][entry.entry_id] - - async_add_entities([DuneHDPlayerEntity(player, DEFAULT_NAME, unique_id)], True) + async_add_entities( + [DuneHDPlayerEntity(entry.runtime_data, DEFAULT_NAME, entry.entry_id)], True + ) class DuneHDPlayerEntity(MediaPlayerEntity): diff --git a/homeassistant/components/duotecno/__init__.py b/homeassistant/components/duotecno/__init__.py index 1873db45226..766fad49e81 100644 --- a/homeassistant/components/duotecno/__init__.py +++ b/homeassistant/components/duotecno/__init__.py @@ -10,8 +10,6 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN - PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.CLIMATE, @@ -21,7 +19,10 @@ PLATFORMS: list[Platform] = [ ] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type DuotecnoConfigEntry = ConfigEntry[PyDuotecno] + + +async def async_setup_entry(hass: HomeAssistant, entry: DuotecnoConfigEntry) -> bool: """Set up duotecno from a config entry.""" controller = PyDuotecno() @@ -31,14 +32,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) except (OSError, InvalidPassword, LoadFailure) as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = controller + + entry.runtime_data = controller await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: DuotecnoConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/duotecno/binary_sensor.py b/homeassistant/components/duotecno/binary_sensor.py index 10c807a8023..aadef47b998 100644 --- a/homeassistant/components/duotecno/binary_sensor.py +++ b/homeassistant/components/duotecno/binary_sensor.py @@ -2,28 +2,25 @@ from __future__ import 
annotations -from duotecno.controller import PyDuotecno from duotecno.unit import ControlUnit, VirtualUnit from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import DuotecnoConfigEntry from .entity import DuotecnoEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DuotecnoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Duotecno binary sensor on config_entry.""" - cntrl: PyDuotecno = hass.data[DOMAIN][entry.entry_id] async_add_entities( DuotecnoBinarySensor(channel) - for channel in cntrl.get_units(["ControlUnit", "VirtualUnit"]) + for channel in entry.runtime_data.get_units(["ControlUnit", "VirtualUnit"]) ) diff --git a/homeassistant/components/duotecno/climate.py b/homeassistant/components/duotecno/climate.py index 0355d2855d3..83a211d97f5 100644 --- a/homeassistant/components/duotecno/climate.py +++ b/homeassistant/components/duotecno/climate.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any, Final -from duotecno.controller import PyDuotecno from duotecno.unit import SensUnit from homeassistant.components.climate import ( @@ -12,12 +11,11 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import DuotecnoConfigEntry from .entity import DuotecnoEntity, api_call HVACMODE: Final = { @@ -33,13 +31,13 @@ PRESETMODES_REVERSE: Final = {value: key for key, value in PRESETMODES.items()} async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DuotecnoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Duotecno climate based on config_entry.""" - cntrl: PyDuotecno = hass.data[DOMAIN][entry.entry_id] async_add_entities( - DuotecnoClimate(channel) for channel in cntrl.get_units(["SensUnit"]) + DuotecnoClimate(channel) + for channel in entry.runtime_data.get_units(["SensUnit"]) ) diff --git a/homeassistant/components/duotecno/cover.py b/homeassistant/components/duotecno/cover.py index 1c4f7d70fc5..7d879741555 100644 --- a/homeassistant/components/duotecno/cover.py +++ b/homeassistant/components/duotecno/cover.py @@ -4,27 +4,25 @@ from __future__ import annotations from typing import Any -from duotecno.controller import PyDuotecno from duotecno.unit import DuoswitchUnit from homeassistant.components.cover import CoverEntity, CoverEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import DuotecnoConfigEntry from .entity import DuotecnoEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DuotecnoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the duoswitch entities.""" - cntrl: PyDuotecno = hass.data[DOMAIN][entry.entry_id] async_add_entities( - DuotecnoCover(channel) for channel in cntrl.get_units("DuoswitchUnit") + DuotecnoCover(channel) + for channel in entry.runtime_data.get_units("DuoswitchUnit") ) diff --git a/homeassistant/components/duotecno/light.py b/homeassistant/components/duotecno/light.py index 57635ac2bc2..7b41cbaef22 100644 --- a/homeassistant/components/duotecno/light.py +++ b/homeassistant/components/duotecno/light.py @@ -2,26 +2,25 @@ from typing import Any -from duotecno.controller import PyDuotecno from duotecno.unit import DimUnit from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import DuotecnoConfigEntry from .entity import DuotecnoEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DuotecnoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Duotecno light based on config_entry.""" - cntrl: PyDuotecno = hass.data[DOMAIN][entry.entry_id] - async_add_entities(DuotecnoLight(channel) for channel in cntrl.get_units("DimUnit")) + async_add_entities( + DuotecnoLight(channel) for channel in entry.runtime_data.get_units("DimUnit") + ) class DuotecnoLight(DuotecnoEntity, LightEntity): diff --git a/homeassistant/components/duotecno/switch.py b/homeassistant/components/duotecno/switch.py index b3a87786d4e..0c01a6ca4de 100644 --- a/homeassistant/components/duotecno/switch.py +++ b/homeassistant/components/duotecno/switch.py @@ -2,27 +2,25 @@ from typing import Any -from duotecno.controller import PyDuotecno from duotecno.unit import SwitchUnit from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from .
import DuotecnoConfigEntry from .entity import DuotecnoEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: DuotecnoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - cntrl: PyDuotecno = hass.data[DOMAIN][entry.entry_id] async_add_entities( - DuotecnoSwitch(channel) for channel in cntrl.get_units("SwitchUnit") + DuotecnoSwitch(channel) + for channel in entry.runtime_data.get_units("SwitchUnit") ) diff --git a/homeassistant/components/dynalite/__init__.py b/homeassistant/components/dynalite/__init__.py index 7388c43cb89..a1a6a38c8ab 100644 --- a/homeassistant/components/dynalite/__init__.py +++ b/homeassistant/components/dynalite/__init__.py @@ -2,27 +2,17 @@ from __future__ import annotations -import voluptuous as vol - from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .bridge import DynaliteBridge -from .const import ( - ATTR_AREA, - ATTR_CHANNEL, - ATTR_HOST, - DOMAIN, - LOGGER, - PLATFORMS, - SERVICE_REQUEST_AREA_PRESET, - SERVICE_REQUEST_CHANNEL_LEVEL, -) +from .const import DOMAIN, LOGGER, PLATFORMS from .convert_config import convert_config from .panel import async_register_dynalite_frontend +from .services import setup_services CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -31,49 +21,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Dynalite platform.""" hass.data[DOMAIN] = {} - async def dynalite_service(service_call: ServiceCall) -> None: - data = service_call.data - host = data.get(ATTR_HOST, "") - bridges = [ - bridge - for bridge in hass.data[DOMAIN].values() - if not host or bridge.host == host - ] - LOGGER.debug("Selected bridged for service call: %s", bridges) - if service_call.service == SERVICE_REQUEST_AREA_PRESET: - bridge_attr = "request_area_preset" - elif service_call.service == SERVICE_REQUEST_CHANNEL_LEVEL: - bridge_attr = "request_channel_level" - for bridge in bridges: - getattr(bridge.dynalite_devices, bridge_attr)( - data[ATTR_AREA], data.get(ATTR_CHANNEL) - ) - - hass.services.async_register( - DOMAIN, - SERVICE_REQUEST_AREA_PRESET, - dynalite_service, - vol.Schema( - { - vol.Optional(ATTR_HOST): cv.string, - vol.Required(ATTR_AREA): int, - vol.Optional(ATTR_CHANNEL): int, - } - ), - ) - - hass.services.async_register( - DOMAIN, - SERVICE_REQUEST_CHANNEL_LEVEL, - dynalite_service, - vol.Schema( - { - vol.Optional(ATTR_HOST): cv.string, - vol.Required(ATTR_AREA): int, - vol.Required(ATTR_CHANNEL): int, - } - ), - ) + setup_services(hass) await async_register_dynalite_frontend(hass) diff --git a/homeassistant/components/dynalite/services.py b/homeassistant/components/dynalite/services.py new file mode 100644 index 00000000000..14160cced9d --- /dev/null +++ b/homeassistant/components/dynalite/services.py @@ -0,0 +1,79 @@ +"""Support for the Dynalite networks.""" + +from __future__ import annotations + +import voluptuous as vol + +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.helpers import config_validation as cv + +from .bridge import DynaliteBridge +from .const import ( + ATTR_AREA, + ATTR_CHANNEL, + ATTR_HOST, + DOMAIN, + LOGGER, + SERVICE_REQUEST_AREA_PRESET, + 
SERVICE_REQUEST_CHANNEL_LEVEL, +) + + +@callback +def _get_bridges(service_call: ServiceCall) -> list[DynaliteBridge]: + host = service_call.data.get(ATTR_HOST, "") + bridges = [ + bridge + for bridge in service_call.hass.data[DOMAIN].values() + if not host or bridge.host == host + ] + LOGGER.debug("Selected bridges for service call: %s", bridges) + return bridges + + +async def _request_area_preset(service_call: ServiceCall) -> None: + bridges = _get_bridges(service_call) + data = service_call.data + for bridge in bridges: + bridge.dynalite_devices.request_area_preset( + data[ATTR_AREA], data.get(ATTR_CHANNEL) + ) + + +async def _request_channel_level(service_call: ServiceCall) -> None: + bridges = _get_bridges(service_call) + data = service_call.data + for bridge in bridges: + bridge.dynalite_devices.request_channel_level( + data[ATTR_AREA], data[ATTR_CHANNEL] + ) + + +@callback +def setup_services(hass: HomeAssistant) -> None: + """Set up the Dynalite platform.""" + hass.services.async_register( + DOMAIN, + SERVICE_REQUEST_AREA_PRESET, + _request_area_preset, + vol.Schema( + { + vol.Optional(ATTR_HOST): cv.string, + vol.Required(ATTR_AREA): int, + vol.Optional(ATTR_CHANNEL): int, + } + ), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_REQUEST_CHANNEL_LEVEL, + _request_channel_level, + vol.Schema( + { + vol.Optional(ATTR_HOST): cv.string, + vol.Required(ATTR_AREA): int, + vol.Required(ATTR_CHANNEL): int, + } + ), + ) diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 8c636bd9b04..7713a8fb4b9 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -71,7 +71,7 @@ }, "start_date": { "name": "Start date", - "description": "Date the vacation starts in the YYYY-MM-DD format (optional, immediately if not provided along with start_time)." + "description": "Date the vacation starts in the YYYY-MM-DD format (optional, immediately if not provided along with 'Start time')." }, "start_time": { "name": "Start time", @@ -79,7 +79,7 @@ }, "end_date": { "name": "End date", - "description": "Date the vacation ends in the YYYY-MM-DD format (optional, 14 days from now if not provided along with end_time)." + "description": "Date the vacation ends in the YYYY-MM-DD format (optional, 14 days from now if not provided along with 'End time')." }, "end_time": { "name": "End time", @@ -149,11 +149,11 @@ }, "set_mic_mode": { "name": "Set mic mode", - "description": "Enables/disables Alexa mic (only for Ecobee 4).", + "description": "Enables/disables Alexa microphone (only for Ecobee 4).", "fields": { "mic_enabled": { "name": "Mic enabled", - "description": "Enable Alexa mic." + "description": "Enable Alexa microphone." 
} } }, diff --git a/homeassistant/components/enphase_envoy/binary_sensor.py b/homeassistant/components/enphase_envoy/binary_sensor.py index 1ad6f259de1..0258281661a 100644 --- a/homeassistant/components/enphase_envoy/binary_sensor.py +++ b/homeassistant/components/enphase_envoy/binary_sensor.py @@ -67,7 +67,6 @@ ENPOWER_SENSORS = ( EnvoyEnpowerBinarySensorEntityDescription( key="mains_oper_state", translation_key="grid_status", - icon="mdi:transmission-tower", value_fn=lambda enpower: enpower.mains_oper_state == "closed", ), ) diff --git a/homeassistant/components/enphase_envoy/icons.json b/homeassistant/components/enphase_envoy/icons.json new file mode 100644 index 00000000000..21262d1dc89 --- /dev/null +++ b/homeassistant/components/enphase_envoy/icons.json @@ -0,0 +1,58 @@ +{ + "entity": { + "binary_sensor": { + "grid_status": { + "default": "mdi:transmission-tower", + "state": { + "off": "mdi:transmission-tower-off" + } + } + }, + "sensor": { + "current_power_production": { + "default": "mdi:solar-power" + }, + "daily_production": { + "default": "mdi:solar-power" + }, + "seven_days_production": { + "default": "mdi:solar-power" + }, + "lifetime_production": { + "default": "mdi:solar-power" + }, + "current_power_production_phase": { + "default": "mdi:solar-power" + }, + "daily_production_phase": { + "default": "mdi:solar-power" + }, + "seven_days_production_phase": { + "default": "mdi:solar-power" + }, + "lifetime_production_phase": { + "default": "mdi:solar-power" + }, + "max_capacity": { + "default": "mdi:battery-charging-100" + }, + "available_energy": { + "default": "mdi:battery-50" + } + }, + "switch": { + "grid_enabled": { + "default": "mdi:transmission-tower", + "state": { + "off": "mdi:transmission-tower-off" + } + }, + "relay_status": { + "default": "mdi:electric-switch-closed", + "state": { + "off": "mdi:electric-switch" + } + } + } + } +} diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index bdc90e6c634..0b1fd8b04b9 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.23.0"], + "requirements": ["pyenphase==1.23.1"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
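Editorial aside (not part of the changeset above): the Dexcom, DirecTV, Dormakaba dKey, Dune HD and Duotecno changes in this section all apply the same refactor, replacing hass.data[DOMAIN][entry.entry_id] storage with a typed config entry whose client or coordinator lives in entry.runtime_data. The sketch below condenses that pattern for a hypothetical integration; ExampleClient, ExampleConfigEntry and the PLATFORMS value are illustrative placeholders, while ConfigEntry, HomeAssistant and the config_entries helpers are the same APIs used in the diffs above.

"""Minimal sketch of the entry.runtime_data pattern (hypothetical integration)."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant

PLATFORMS = [Platform.SENSOR]  # illustrative; real integrations list their own platforms


class ExampleClient:
    """Stand-in for the integration's API client (DIRECTV, DuneHDPlayer, PyDuotecno, ...)."""

    def __init__(self, host: str) -> None:
        self.host = host


# A typed alias lets platform modules read entry.runtime_data without casts or hass.data lookups.
type ExampleConfigEntry = ConfigEntry[ExampleClient]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the client on the entry instead of in hass.data[DOMAIN]."""
    entry.runtime_data = ExampleClient(entry.data[CONF_HOST])
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload platforms; runtime_data is discarded with the entry, so no manual pop is needed."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

A platform module can then annotate its own async_setup_entry with the typed entry and read entry.runtime_data directly, as the media_player, sensor and cover platforms above do; because runtime_data is dropped when the entry unloads, the explicit hass.data[DOMAIN].pop calls disappear from async_unload_entry in these diffs.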
diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 6100c91fbb4..4431a298c8c 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -76,12 +76,12 @@ rules: comment: https://www.home-assistant.io/integrations/enphase_envoy#troubleshooting docs-use-cases: todo dynamic-devices: todo - entity-category: todo + entity-category: done entity-device-class: done entity-disabled-by-default: done entity-translations: done exception-translations: done - icon-translations: todo + icon-translations: done reconfiguration-flow: done repair-issues: status: exempt diff --git a/homeassistant/components/enphase_envoy/select.py b/homeassistant/components/enphase_envoy/select.py index d9729a16683..7dc275aab37 100644 --- a/homeassistant/components/enphase_envoy/select.py +++ b/homeassistant/components/enphase_envoy/select.py @@ -37,7 +37,7 @@ class EnvoyRelaySelectEntityDescription(SelectEntityDescription): class EnvoyStorageSettingsSelectEntityDescription(SelectEntityDescription): """Describes an Envoy storage settings select entity.""" - value_fn: Callable[[EnvoyStorageSettings], str] + value_fn: Callable[[EnvoyStorageSettings], str | None] update_fn: Callable[[Envoy, str], Awaitable[dict[str, Any]]] @@ -118,7 +118,9 @@ STORAGE_MODE_ENTITY = EnvoyStorageSettingsSelectEntityDescription( key="storage_mode", translation_key="storage_mode", options=STORAGE_MODE_OPTIONS, - value_fn=lambda storage_settings: STORAGE_MODE_MAP[storage_settings.mode], + value_fn=lambda storage_settings: ( + None if not storage_settings.mode else STORAGE_MODE_MAP[storage_settings.mode] + ), update_fn=lambda envoy, value: envoy.set_storage_mode( REVERSE_STORAGE_MODE_MAP[value] ), @@ -235,7 +237,7 @@ class EnvoyStorageSettingsSelectEntity(EnvoyBaseEntity, SelectEntity): ) @property - def current_option(self) -> str: + def current_option(self) -> str | None: """Return the state of the select entity.""" assert self.data.tariff is not None assert self.data.tariff.storage_settings is not None diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index 62ae5b621ac..dcf062a5417 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -37,6 +37,7 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( PERCENTAGE, + EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -55,7 +56,6 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity -ICON = "mdi:flash" _LOGGER = logging.getLogger(__name__) INVERTERS_KEY = "inverters" @@ -370,6 +370,7 @@ CT_NET_CONSUMPTION_SENSORS = ( key="net_consumption_ct_metering_status", translation_key="net_ct_metering_status", device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, options=list(CtMeterStatus), entity_registry_enabled_default=False, value_fn=attrgetter("metering_status"), @@ -379,6 +380,7 @@ CT_NET_CONSUMPTION_SENSORS = ( key="net_consumption_ct_status_flags", translation_key="net_ct_status_flags", state_class=None, + entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), on_phase=None, @@ -452,6 +454,7 @@ CT_PRODUCTION_SENSORS = ( 
translation_key="production_ct_metering_status", device_class=SensorDeviceClass.ENUM, options=list(CtMeterStatus), + entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_fn=attrgetter("metering_status"), on_phase=None, @@ -460,6 +463,7 @@ CT_PRODUCTION_SENSORS = ( key="production_ct_status_flags", translation_key="production_ct_status_flags", state_class=None, + entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), on_phase=None, @@ -565,6 +569,7 @@ CT_STORAGE_SENSORS = ( translation_key="storage_ct_metering_status", device_class=SensorDeviceClass.ENUM, options=list(CtMeterStatus), + entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_fn=attrgetter("metering_status"), on_phase=None, @@ -573,6 +578,7 @@ CT_STORAGE_SENSORS = ( key="storage_ct_status_flags", translation_key="storage_ct_status_flags", state_class=None, + entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), on_phase=None, @@ -946,8 +952,6 @@ class EnvoySensorBaseEntity(EnvoyBaseEntity, SensorEntity): class EnvoySystemSensorEntity(EnvoySensorBaseEntity): """Envoy system base entity.""" - _attr_icon = ICON - def __init__( self, coordinator: EnphaseUpdateCoordinator, @@ -1174,7 +1178,6 @@ class EnvoyStorageCTPhaseEntity(EnvoySystemSensorEntity): class EnvoyInverterEntity(EnvoySensorBaseEntity): """Envoy inverter entity.""" - _attr_icon = ICON entity_description: EnvoyInverterSensorEntityDescription def __init__( diff --git a/homeassistant/components/enphase_envoy/switch.py b/homeassistant/components/enphase_envoy/switch.py index 5170b694587..7074f341cc8 100644 --- a/homeassistant/components/enphase_envoy/switch.py +++ b/homeassistant/components/enphase_envoy/switch.py @@ -60,6 +60,7 @@ ENPOWER_GRID_SWITCH = EnvoyEnpowerSwitchEntityDescription( RELAY_STATE_SWITCH = EnvoyDryContactSwitchEntityDescription( key="relay_status", + translation_key="relay_status", value_fn=lambda dry_contact: dry_contact.status == DryContactStatus.CLOSED, turn_on_fn=lambda envoy, id: envoy.close_dry_contact(id), turn_off_fn=lambda envoy, id: envoy.open_dry_contact(id), diff --git a/homeassistant/components/envisalink/strings.json b/homeassistant/components/envisalink/strings.json index a539c890169..265ce28f920 100644 --- a/homeassistant/components/envisalink/strings.json +++ b/homeassistant/components/envisalink/strings.json @@ -16,11 +16,11 @@ }, "invoke_custom_function": { "name": "Invoke custom function", - "description": "Allows users with DSC panels to trigger a PGM output (1-4). Note that you need to specify the alarm panel's \"code\" parameter for this to work.\n.", + "description": "Allows users with DSC panels to trigger a PGM output (1-4). Note that you need to specify the alarm panel's \"code\" parameter for this to work.", "fields": { "partition": { "name": "Partition", - "description": "The alarm panel partition to trigger the PGM output on. Typically this is just \"1\".\n." + "description": "The alarm panel partition to trigger the PGM output on. Typically this is just \"1\"." 
}, "pgm": { "name": "PGM", diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index 43f18d4fffc..43f524516a8 100644 --- a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["eq3btsmart"], - "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"] + "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.1.1"] } diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index f56f8342df6..4682be1c5c7 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -18,7 +18,7 @@ "requirements": [ "aioesphomeapi==28.0.1", "esphome-dashboard-api==1.2.3", - "bleak-esphome==2.0.0" + "bleak-esphome==2.1.1" ], "zeroconf": ["_esphomelib._tcp.local."] } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 3d9f12bd3d3..2724569d1ed 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20250109.0"] + "requirements": ["home-assistant-frontend==20250109.2"] } diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index dd6829eacce..fe6f0253f48 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -268,6 +268,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity): else: self._attr_preset_modes = [PRESET_NONE] self._presets = presets + self._presets_inv = {v: k for k, v in presets.items()} async def async_added_to_hass(self) -> None: """Run when entity about to be added.""" @@ -421,6 +422,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity): """Set new target temperature.""" if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: return + self._attr_preset_mode = self._presets_inv.get(temperature, PRESET_NONE) self._target_temp = temperature await self._async_control_heating(force=True) self.async_write_ha_state() diff --git a/homeassistant/components/harmony/manifest.json b/homeassistant/components/harmony/manifest.json index d37801376ec..aab4f51b09a 100644 --- a/homeassistant/components/harmony/manifest.json +++ b/homeassistant/components/harmony/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/harmony", "iot_class": "local_push", "loggers": ["aioharmony", "slixmpp"], - "requirements": ["aioharmony==0.2.10"], + "requirements": ["aioharmony==0.4.1"], "ssdp": [ { "manufacturer": "Logitech", diff --git a/homeassistant/components/hdmi_cec/strings.json b/homeassistant/components/hdmi_cec/strings.json index 449b9f72fe7..70848b0514e 100644 --- a/homeassistant/components/hdmi_cec/strings.json +++ b/homeassistant/components/hdmi_cec/strings.json @@ -6,7 +6,7 @@ }, "select_device": { "name": "Select device", - "description": "Select HDMI device.", + "description": "Selects an HDMI device.", "fields": { "device": { "name": "[%key:common::config_flow::data::device%]", diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index a3e720a5f21..10fd2bfcff3 
100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -2,54 +2,18 @@ from __future__ import annotations -import asyncio -from dataclasses import dataclass from datetime import timedelta -import logging -from typing import Any - -from pyheos import ( - Credentials, - Heos, - HeosError, - HeosOptions, - HeosPlayer, - PlayerUpdateResult, - SignalHeosEvent, - const as heos_const, -) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_PASSWORD, - CONF_USERNAME, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback -from homeassistant.exceptions import ( - ConfigEntryNotReady, - HomeAssistantError, - ServiceValidationError, -) -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) from homeassistant.helpers.typing import ConfigType -from homeassistant.util import Throttle from . import services -from .const import ( - COMMAND_RETRY_ATTEMPTS, - COMMAND_RETRY_DELAY, - DOMAIN, - SIGNAL_HEOS_PLAYER_ADDED, - SIGNAL_HEOS_UPDATED, -) +from .const import DOMAIN +from .coordinator import HeosCoordinator PLATFORMS = [Platform.MEDIA_PLAYER] @@ -57,20 +21,7 @@ MIN_UPDATE_SOURCES = timedelta(seconds=1) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -_LOGGER = logging.getLogger(__name__) - - -@dataclass -class HeosRuntimeData: - """Runtime data and coordinators for HEOS config entries.""" - - controller_manager: ControllerManager - group_manager: GroupManager - source_manager: SourceManager - players: dict[int, HeosPlayer] - - -type HeosConfigEntry = ConfigEntry[HeosRuntimeData] +type HeosConfigEntry = ConfigEntry[HeosCoordinator] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -97,78 +48,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool ) break - host = entry.data[CONF_HOST] - credentials: Credentials | None = None - if entry.options: - credentials = Credentials( - entry.options[CONF_USERNAME], entry.options[CONF_PASSWORD] - ) - - # Setting all_progress_events=False ensures that we only receive a - # media position update upon start of playback or when media changes - controller = Heos( - HeosOptions( - host, - all_progress_events=False, - auto_reconnect=True, - credentials=credentials, - ) - ) - - # Auth failure handler must be added before connecting to the host, otherwise - # the event will be missed when login fails during connection. - async def auth_failure() -> None: - """Handle authentication failure.""" - entry.async_start_reauth(hass) - - entry.async_on_unload(controller.add_on_user_credentials_invalid(auth_failure)) - - try: - # Auto reconnect only operates if initial connection was successful. 
- await controller.connect() - except HeosError as error: - await controller.disconnect() - _LOGGER.debug("Unable to connect to controller %s: %s", host, error) - raise ConfigEntryNotReady from error - - # Disconnect when shutting down - async def disconnect_controller(event): - await controller.disconnect() - - entry.async_on_unload( - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, disconnect_controller) - ) - - # Get players and sources - try: - players = await controller.get_players() - favorites = {} - if controller.is_signed_in: - favorites = await controller.get_favorites() - else: - _LOGGER.warning( - "The HEOS System is not logged in: Enter credentials in the integration options to access favorites and streaming services" - ) - inputs = await controller.get_input_sources() - except HeosError as error: - await controller.disconnect() - _LOGGER.debug("Unable to retrieve players and sources: %s", error) - raise ConfigEntryNotReady from error - - controller_manager = ControllerManager(hass, controller) - await controller_manager.connect_listeners() - - source_manager = SourceManager(favorites, inputs) - source_manager.connect_update(hass, controller) - - group_manager = GroupManager(hass, controller, players) - - entry.runtime_data = HeosRuntimeData( - controller_manager, group_manager, source_manager, players - ) - - group_manager.connect_update() - entry.async_on_unload(group_manager.disconnect_update) + coordinator = HeosCoordinator(hass, entry) + await coordinator.async_setup() + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -177,343 +59,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Unload a config entry.""" - await entry.runtime_data.controller_manager.disconnect() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class ControllerManager: - """Class that manages events of the controller.""" - - def __init__(self, hass: HomeAssistant, controller: Heos) -> None: - """Init the controller manager.""" - self._hass = hass - self._device_registry: dr.DeviceRegistry | None = None - self._entity_registry: er.EntityRegistry | None = None - self.controller = controller - - async def connect_listeners(self): - """Subscribe to events of interest.""" - self._device_registry = dr.async_get(self._hass) - self._entity_registry = er.async_get(self._hass) - - # Handle controller events - self.controller.add_on_controller_event(self._controller_event) - - # Handle connection-related events - self.controller.add_on_heos_event(self._heos_event) - - async def disconnect(self): - """Disconnect subscriptions.""" - self.controller.dispatcher.disconnect_all() - await self.controller.disconnect() - - async def _controller_event( - self, event: str, data: PlayerUpdateResult | None - ) -> None: - """Handle controller event.""" - if event == heos_const.EVENT_PLAYERS_CHANGED: - assert data is not None - self.update_ids(data.updated_player_ids) - # Update players - async_dispatcher_send(self._hass, SIGNAL_HEOS_UPDATED) - - async def _heos_event(self, event): - """Handle connection event.""" - if event == SignalHeosEvent.CONNECTED: - try: - # Retrieve latest players and refresh status - data = await self.controller.load_players() - self.update_ids(data.updated_player_ids) - except HeosError as ex: - _LOGGER.error("Unable to refresh players: %s", ex) - # Update players - _LOGGER.debug("HEOS 
Controller event called, calling dispatcher") - async_dispatcher_send(self._hass, SIGNAL_HEOS_UPDATED) - - def update_ids(self, mapped_ids: dict[int, int]): - """Update the IDs in the device and entity registry.""" - # mapped_ids contains the mapped IDs (new:old) - for old_id, new_id in mapped_ids.items(): - # update device registry - assert self._device_registry is not None - entry = self._device_registry.async_get_device( - identifiers={(DOMAIN, str(old_id))} - ) - new_identifiers = {(DOMAIN, str(new_id))} - if entry: - self._device_registry.async_update_device( - entry.id, - new_identifiers=new_identifiers, - ) - _LOGGER.debug( - "Updated device %s identifiers to %s", entry.id, new_identifiers - ) - # update entity registry - assert self._entity_registry is not None - entity_id = self._entity_registry.async_get_entity_id( - Platform.MEDIA_PLAYER, DOMAIN, str(old_id) - ) - if entity_id: - self._entity_registry.async_update_entity( - entity_id, new_unique_id=str(new_id) - ) - _LOGGER.debug("Updated entity %s unique id to %s", entity_id, new_id) - - -class GroupManager: - """Class that manages HEOS groups.""" - - def __init__( - self, hass: HomeAssistant, controller: Heos, players: dict[int, HeosPlayer] - ) -> None: - """Init group manager.""" - self._hass = hass - self._group_membership: dict[str, list[str]] = {} - self._disconnect_player_added = None - self._initialized = False - self.controller = controller - self.players = players - self.entity_id_map: dict[int, str] = {} - - def _get_entity_id_to_player_id_map(self) -> dict: - """Return mapping of all HeosMediaPlayer entity_ids to player_ids.""" - return {v: k for k, v in self.entity_id_map.items()} - - async def async_get_group_membership(self) -> dict[str, list[str]]: - """Return all group members for each player as entity_ids.""" - group_info_by_entity_id: dict[str, list[str]] = { - player_entity_id: [] - for player_entity_id in self._get_entity_id_to_player_id_map() - } - - try: - groups = await self.controller.get_groups() - except HeosError as err: - _LOGGER.error("Unable to get HEOS group info: %s", err) - return group_info_by_entity_id - - player_id_to_entity_id_map = self.entity_id_map - for group in groups.values(): - leader_entity_id = player_id_to_entity_id_map.get(group.lead_player_id) - member_entity_ids = [ - player_id_to_entity_id_map[member] - for member in group.member_player_ids - if member in player_id_to_entity_id_map - ] - # Make sure the group leader is always the first element - group_info = [leader_entity_id, *member_entity_ids] - if leader_entity_id: - group_info_by_entity_id[leader_entity_id] = group_info # type: ignore[assignment] - for member_entity_id in member_entity_ids: - group_info_by_entity_id[member_entity_id] = group_info # type: ignore[assignment] - - return group_info_by_entity_id - - async def async_join_players( - self, leader_id: int, member_entity_ids: list[str] - ) -> None: - """Create a group a group leader and member players.""" - # Resolve HEOS player_id for each member entity_id - entity_id_to_player_id_map = self._get_entity_id_to_player_id_map() - member_ids: list[int] = [] - for member in member_entity_ids: - member_id = entity_id_to_player_id_map.get(member) - if not member_id: - raise HomeAssistantError( - f"The group member {member} could not be resolved to a HEOS player." 
- ) - member_ids.append(member_id) - - await self.controller.create_group(leader_id, member_ids) - - async def async_unjoin_player(self, player_id: int): - """Remove `player_entity_id` from any group.""" - await self.controller.create_group(player_id, []) - - async def async_update_groups(self) -> None: - """Update the group membership from the controller.""" - if groups := await self.async_get_group_membership(): - self._group_membership = groups - _LOGGER.debug("Groups updated due to change event") - # Let players know to update - async_dispatcher_send(self._hass, SIGNAL_HEOS_UPDATED) - else: - _LOGGER.debug("Groups empty") - - @callback - def connect_update(self): - """Connect listener for when groups change and signal player update.""" - - async def _on_controller_event(event: str, data: Any | None) -> None: - if event == heos_const.EVENT_GROUPS_CHANGED: - await self.async_update_groups() - - self.controller.add_on_controller_event(_on_controller_event) - self.controller.add_on_connected(self.async_update_groups) - - # When adding a new HEOS player we need to update the groups. - async def _async_handle_player_added(): - # Avoid calling async_update_groups when the entity_id map has not been - # fully populated yet. This may only happen during early startup. - if len(self.players) <= len(self.entity_id_map) and not self._initialized: - self._initialized = True - await self.async_update_groups() - - self._disconnect_player_added = async_dispatcher_connect( - self._hass, SIGNAL_HEOS_PLAYER_ADDED, _async_handle_player_added - ) - - @callback - def disconnect_update(self): - """Disconnect the listeners.""" - if self._disconnect_player_added: - self._disconnect_player_added() - self._disconnect_player_added = None - - @callback - def register_media_player(self, player_id: int, entity_id: str) -> CALLBACK_TYPE: - """Register a media player player_id with it's entity_id so it can be resolved later.""" - self.entity_id_map[player_id] = entity_id - return lambda: self.unregister_media_player(player_id) - - @callback - def unregister_media_player(self, player_id) -> None: - """Remove a media player player_id from the entity_id map.""" - self.entity_id_map.pop(player_id, None) - - @property - def group_membership(self): - """Provide access to group members for player entities.""" - return self._group_membership - - -class SourceManager: - """Class that manages sources for players.""" - - def __init__( - self, - favorites, - inputs, - *, - retry_delay: int = COMMAND_RETRY_DELAY, - max_retry_attempts: int = COMMAND_RETRY_ATTEMPTS, - ) -> None: - """Init input manager.""" - self.retry_delay = retry_delay - self.max_retry_attempts = max_retry_attempts - self.favorites = favorites - self.inputs = inputs - self.source_list = self._build_source_list() - - def _build_source_list(self): - """Build a single list of inputs from various types.""" - source_list = [] - source_list.extend([favorite.name for favorite in self.favorites.values()]) - source_list.extend([source.name for source in self.inputs]) - return source_list - - async def play_source(self, source: str, player): - """Determine type of source and play it.""" - index = next( - ( - index - for index, favorite in self.favorites.items() - if favorite.name == source - ), - None, - ) - if index is not None: - await player.play_preset_station(index) - return - - input_source = next( - ( - input_source - for input_source in self.inputs - if input_source.name == source - ), - None, - ) - if input_source is not None: - await 
player.play_input_source(input_source.media_id) - return - - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="unknown_source", - translation_placeholders={"source": source}, - ) - - def get_current_source(self, now_playing_media): - """Determine current source from now playing media.""" - # Match input by input_name:media_id - if now_playing_media.source_id == heos_const.MUSIC_SOURCE_AUX_INPUT: - return next( - ( - input_source.name - for input_source in self.inputs - if input_source.media_id == now_playing_media.media_id - ), - None, - ) - # Try matching favorite by name:station or media_id:album_id - return next( - ( - source.name - for source in self.favorites.values() - if source.name == now_playing_media.station - or source.media_id == now_playing_media.album_id - ), - None, - ) - - @callback - def connect_update(self, hass: HomeAssistant, controller: Heos) -> None: - """Connect listener for when sources change and signal player update. - - EVENT_SOURCES_CHANGED is often raised multiple times in response to a - physical event therefore throttle it. Retrieving sources immediately - after the event may fail so retry. - """ - - @Throttle(MIN_UPDATE_SOURCES) - async def get_sources(): - retry_attempts = 0 - while True: - try: - favorites = {} - if controller.is_signed_in: - favorites = await controller.get_favorites() - inputs = await controller.get_input_sources() - except HeosError as error: - if retry_attempts < self.max_retry_attempts: - retry_attempts += 1 - _LOGGER.debug( - "Error retrieving sources and will retry: %s", error - ) - await asyncio.sleep(self.retry_delay) - else: - _LOGGER.error("Unable to update sources: %s", error) - return None - else: - return favorites, inputs - - async def _update_sources() -> None: - # If throttled, it will return None - if sources := await get_sources(): - self.favorites, self.inputs = sources - self.source_list = self._build_source_list() - _LOGGER.debug("Sources updated due to changed event") - # Let players know to update - async_dispatcher_send(hass, SIGNAL_HEOS_UPDATED) - - async def _on_controller_event(event: str, data: Any | None) -> None: - if event in ( - heos_const.EVENT_SOURCES_CHANGED, - heos_const.EVENT_USER_CHANGED, - ): - await _update_sources() - - controller.add_on_connected(_update_sources) - controller.add_on_user_credentials_invalid(_update_sources) - controller.add_on_controller_event(_on_controller_event) diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index 86d5123bccf..18b8f1f7918 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from pyheos import CommandAuthenticationError, Heos, HeosError, HeosOptions @@ -10,6 +10,7 @@ import voluptuous as vol from homeassistant.config_entries import ( ConfigEntry, + ConfigEntryState, ConfigFlow, ConfigFlowResult, OptionsFlow, @@ -22,6 +23,7 @@ from homeassistant.helpers.service_info.ssdp import ( SsdpServiceInfo, ) +from . 
import HeosConfigEntry from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -183,10 +185,10 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Validate account credentials and update options.""" errors: dict[str, str] = {} - entry = self._get_reauth_entry() + entry: HeosConfigEntry = self._get_reauth_entry() if user_input is not None: - heos = cast(Heos, entry.runtime_data.controller_manager.controller) - if await _validate_auth(user_input, heos, errors): + assert entry.state is ConfigEntryState.LOADED + if await _validate_auth(user_input, entry.runtime_data.heos, errors): return self.async_update_reload_and_abort(entry, options=user_input) return self.async_show_form( @@ -207,10 +209,8 @@ class HeosOptionsFlowHandler(OptionsFlow): """Manage the options.""" errors: dict[str, str] = {} if user_input is not None: - heos = cast( - Heos, self.config_entry.runtime_data.controller_manager.controller - ) - if await _validate_auth(user_input, heos, errors): + entry: HeosConfigEntry = self.config_entry + if await _validate_auth(user_input, entry.runtime_data.heos, errors): return self.async_create_entry(data=user_input) return self.async_show_form( diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 5b2df2b5ebf..7f03fa11e79 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -2,10 +2,6 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" -COMMAND_RETRY_ATTEMPTS = 2 -COMMAND_RETRY_DELAY = 1 DOMAIN = "heos" SERVICE_SIGN_IN = "sign_in" SERVICE_SIGN_OUT = "sign_out" -SIGNAL_HEOS_PLAYER_ADDED = "heos_player_added" -SIGNAL_HEOS_UPDATED = "heos_updated" diff --git a/homeassistant/components/heos/coordinator.py b/homeassistant/components/heos/coordinator.py new file mode 100644 index 00000000000..8ed8449685a --- /dev/null +++ b/homeassistant/components/heos/coordinator.py @@ -0,0 +1,285 @@ +"""HEOS integration coordinator. + +Control of all HEOS devices is through connection to a single device. Data is pushed through events. +The coordinator is responsible for refreshing data in response to system-wide events and notifying +entities to update. Entities subscribe to entity-specific updates within the entity class itself. +""" + +from collections.abc import Callable +from datetime import datetime, timedelta +import logging + +from pyheos import ( + Credentials, + Heos, + HeosError, + HeosNowPlayingMedia, + HeosOptions, + HeosPlayer, + MediaItem, + MediaType, + PlayerUpdateResult, + const, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HassJob, HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from . 
import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class HeosCoordinator(DataUpdateCoordinator[None]): + """Define the HEOS integration coordinator.""" + + def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Set up the coordinator and set in config_entry.""" + self.host: str = config_entry.data[CONF_HOST] + credentials: Credentials | None = None + if config_entry.options: + credentials = Credentials( + config_entry.options[CONF_USERNAME], config_entry.options[CONF_PASSWORD] + ) + # Setting all_progress_events=False ensures that we only receive a + # media position update upon start of playback or when media changes + self.heos = Heos( + HeosOptions( + self.host, + all_progress_events=False, + auto_reconnect=True, + credentials=credentials, + ) + ) + self._update_sources_pending: bool = False + self._source_list: list[str] = [] + self._favorites: dict[int, MediaItem] = {} + self._inputs: list[MediaItem] = [] + super().__init__(hass, _LOGGER, config_entry=config_entry, name=DOMAIN) + + async def async_setup(self) -> None: + """Set up the coordinator; connect to the host; and retrieve initial data.""" + # Add before connect as it may occur during initial connection + self.heos.add_on_user_credentials_invalid(self._async_on_auth_failure) + # Connect to the device + try: + await self.heos.connect() + except HeosError as error: + raise ConfigEntryNotReady from error + # Load players + try: + await self.heos.get_players() + except HeosError as error: + raise ConfigEntryNotReady from error + + if not self.heos.is_signed_in: + _LOGGER.warning( + "The HEOS System is not logged in: Enter credentials in the integration options to access favorites and streaming services" + ) + # Retrieve initial data + await self._async_update_groups() + await self._async_update_sources() + # Attach event callbacks + self.heos.add_on_disconnected(self._async_on_disconnected) + self.heos.add_on_connected(self._async_on_reconnected) + self.heos.add_on_controller_event(self._async_on_controller_event) + + async def async_shutdown(self) -> None: + """Disconnect all callbacks and disconnect from the device.""" + self.heos.dispatcher.disconnect_all() # Removes all connected through heos.add_on_* and player.add_on_* + await self.heos.disconnect() + await super().async_shutdown() + + def async_add_listener(self, update_callback, context=None) -> Callable[[], None]: + """Add a listener for the coordinator.""" + remove_listener = super().async_add_listener(update_callback, context) + # Update entities so group_member entity_ids fully populate. 
+ self.async_update_listeners() + return remove_listener + + async def _async_on_auth_failure(self) -> None: + """Handle when the user credentials are no longer valid.""" + assert self.config_entry is not None + self.config_entry.async_start_reauth(self.hass) + + async def _async_on_disconnected(self) -> None: + """Handle when disconnected so entities are marked unavailable.""" + _LOGGER.warning("Connection to HEOS host %s lost", self.host) + self.async_update_listeners() + + async def _async_on_reconnected(self) -> None: + """Handle when reconnected so resources are updated and entities marked available.""" + await self._async_update_players() + await self._async_update_sources() + _LOGGER.warning("Successfully reconnected to HEOS host %s", self.host) + self.async_update_listeners() + + async def _async_on_controller_event( + self, event: str, data: PlayerUpdateResult | None + ) -> None: + """Handle a controller event, such as players or groups changed.""" + if event == const.EVENT_PLAYERS_CHANGED: + assert data is not None + if data.updated_player_ids: + self._async_update_player_ids(data.updated_player_ids) + elif event == const.EVENT_GROUPS_CHANGED: + await self._async_update_players() + elif ( + event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED) + and not self._update_sources_pending + ): + # Update the sources after a brief delay as we may have received multiple qualifying + # events at once and devices cannot handle immediately attempting to refresh sources. + self._update_sources_pending = True + + async def update_sources_job(_: datetime | None = None) -> None: + await self._async_update_sources() + self._update_sources_pending = False + self.async_update_listeners() + + assert self.config_entry is not None + self.config_entry.async_on_unload( + async_call_later( + self.hass, + timedelta(seconds=1), + HassJob( + update_sources_job, + "heos_update_sources", + cancel_on_shutdown=True, + ), + ) + ) + self.async_update_listeners() + + def _async_update_player_ids(self, updated_player_ids: dict[int, int]) -> None: + """Update the IDs in the device and entity registry.""" + device_registry = dr.async_get(self.hass) + entity_registry = er.async_get(self.hass) + # updated_player_ids contains the mapped IDs in format old:new + for old_id, new_id in updated_player_ids.items(): + # update device registry + entry = device_registry.async_get_device( + identifiers={(DOMAIN, str(old_id))} + ) + if entry: + new_identifiers = entry.identifiers.copy() + new_identifiers.remove((DOMAIN, str(old_id))) + new_identifiers.add((DOMAIN, str(new_id))) + device_registry.async_update_device( + entry.id, + new_identifiers=new_identifiers, + ) + _LOGGER.debug( + "Updated device %s identifiers to %s", entry.id, new_identifiers + ) + # update entity registry + entity_id = entity_registry.async_get_entity_id( + Platform.MEDIA_PLAYER, DOMAIN, str(old_id) + ) + if entity_id: + entity_registry.async_update_entity( + entity_id, new_unique_id=str(new_id) + ) + _LOGGER.debug("Updated entity %s unique id to %s", entity_id, new_id) + + async def _async_update_groups(self) -> None: + """Update group information.""" + try: + await self.heos.get_groups(refresh=True) + except HeosError as error: + _LOGGER.error("Unable to retrieve groups: %s", error) + + async def _async_update_sources(self) -> None: + """Build source list for entities.""" + self._source_list.clear() + # Get favorites only if reportedly signed in. 
+ if self.heos.is_signed_in: + try: + self._favorites = await self.heos.get_favorites() + except HeosError as error: + _LOGGER.error("Unable to retrieve favorites: %s", error) + else: + self._source_list.extend( + favorite.name for favorite in self._favorites.values() + ) + # Get input sources (across all devices in the HEOS system) + try: + self._inputs = await self.heos.get_input_sources() + except HeosError as error: + _LOGGER.error("Unable to retrieve input sources: %s", error) + else: + self._source_list.extend([source.name for source in self._inputs]) + + async def _async_update_players(self) -> None: + """Update players after reconnection.""" + try: + player_updates = await self.heos.load_players() + except HeosError as error: + _LOGGER.error("Unable to refresh players: %s", error) + return + # After reconnecting, player_id may have changed + if player_updates.updated_player_ids: + self._async_update_player_ids(player_updates.updated_player_ids) + + @callback + def async_get_source_list(self) -> list[str]: + """Return the list of sources for players.""" + return list(self._source_list) + + @callback + def async_get_favorite_index(self, name: str) -> int | None: + """Get the index of a favorite by name.""" + for index, favorite in self._favorites.items(): + if favorite.name == name: + return index + return None + + @callback + def async_get_current_source( + self, now_playing_media: HeosNowPlayingMedia + ) -> str | None: + """Determine current source from now playing media (either input source or favorite).""" + # Try matching input source + if now_playing_media.source_id == const.MUSIC_SOURCE_AUX_INPUT: + # If playing a remote input, name will match station + for input_source in self._inputs: + if input_source.name == now_playing_media.station: + return input_source.name + # If playing a local input, match media_id. This needs to be a second loop as media_id + # will match both local and remote inputs, so prioritize remote match by name first. 
+ for input_source in self._inputs: + if input_source.media_id == now_playing_media.media_id: + return input_source.name + # Try matching favorite + if now_playing_media.type == MediaType.STATION: + # Some stations match on name:station, others match on media_id:album_id + for favorite in self._favorites.values(): + if ( + favorite.name == now_playing_media.station + or favorite.media_id == now_playing_media.album_id + ): + return favorite.name + return None + + async def async_play_source(self, source: str, player: HeosPlayer) -> None: + """Determine type of source and play it.""" + # Favorite + if (index := self.async_get_favorite_index(source)) is not None: + await player.play_preset_station(index) + return + # Input source + for input_source in self._inputs: + if input_source.name == source: + await player.play_media(input_source) + return + + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unknown_source", + translation_placeholders={"source": source}, + ) diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index b8690040061..d405b235f76 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine from functools import reduce, wraps -import logging from operator import ior from typing import Any @@ -14,6 +13,7 @@ from pyheos import ( HeosError, HeosPlayer, PlayState, + RepeatType, const as heos_const, ) @@ -26,20 +26,21 @@ from homeassistant.components.media_player import ( MediaPlayerEntityFeature, MediaPlayerState, MediaType, + RepeatMode, async_process_play_media_url, ) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow -from . import GroupManager, HeosConfigEntry, SourceManager -from .const import DOMAIN as HEOS_DOMAIN, SIGNAL_HEOS_PLAYER_ADDED, SIGNAL_HEOS_UPDATED +from . 
import HeosConfigEntry +from .const import DOMAIN as HEOS_DOMAIN +from .coordinator import HeosCoordinator PARALLEL_UPDATES = 0 @@ -48,7 +49,6 @@ BASE_SUPPORTED_FEATURES = ( | MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_STEP | MediaPlayerEntityFeature.CLEAR_PLAYLIST - | MediaPlayerEntityFeature.SHUFFLE_SET | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.PLAY_MEDIA | MediaPlayerEntityFeature.GROUPING @@ -78,21 +78,23 @@ HA_HEOS_ENQUEUE_MAP = { MediaPlayerEnqueue.PLAY: AddCriteriaType.PLAY_NOW, } -_LOGGER = logging.getLogger(__name__) +HEOS_HA_REPEAT_TYPE_MAP = { + RepeatType.OFF: RepeatMode.OFF, + RepeatType.ON_ALL: RepeatMode.ALL, + RepeatType.ON_ONE: RepeatMode.ONE, +} +HA_HEOS_REPEAT_TYPE_MAP = {v: k for k, v in HEOS_HA_REPEAT_TYPE_MAP.items()} async def async_setup_entry( hass: HomeAssistant, entry: HeosConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Add media players for a config entry.""" - players = entry.runtime_data.players devices = [ - HeosMediaPlayer( - player, entry.runtime_data.source_manager, entry.runtime_data.group_manager - ) - for player in players.values() + HeosMediaPlayer(entry.runtime_data, player) + for player in entry.runtime_data.heos.players.values() ] - async_add_entities(devices, True) + async_add_entities(devices) type _FuncType[**_P] = Callable[_P, Awaitable[Any]] @@ -121,27 +123,19 @@ def catch_action_error[**_P]( return decorator -class HeosMediaPlayer(MediaPlayerEntity): +class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): """The HEOS player.""" _attr_media_content_type = MediaType.MUSIC - _attr_should_poll = False _attr_supported_features = BASE_SUPPORTED_FEATURES _attr_media_image_remotely_accessible = True _attr_has_entity_name = True _attr_name = None - def __init__( - self, - player: HeosPlayer, - source_manager: SourceManager, - group_manager: GroupManager, - ) -> None: + def __init__(self, coordinator: HeosCoordinator, player: HeosPlayer) -> None: """Initialize.""" self._media_position_updated_at = None self._player: HeosPlayer = player - self._source_manager = source_manager - self._group_manager = group_manager self._attr_unique_id = str(player.player_id) model_parts = player.model.split(maxsplit=1) manufacturer = model_parts[0] if len(model_parts) == 2 else "HEOS" @@ -154,45 +148,73 @@ class HeosMediaPlayer(MediaPlayerEntity): serial_number=player.serial, # Only available for some models sw_version=player.version, ) + super().__init__(coordinator, context=player.player_id) async def _player_update(self, event): """Handle player attribute updated.""" if event == heos_const.EVENT_PLAYER_NOW_PLAYING_PROGRESS: self._media_position_updated_at = utcnow() - await self.async_update_ha_state(True) + self._handle_coordinator_update() - async def _heos_updated(self) -> None: - """Handle sources changed.""" - await self.async_update_ha_state(True) + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attributes() + super()._handle_coordinator_update() + + @callback + def _get_group_members(self) -> list[str] | None: + """Get group member entity IDs for the group.""" + if self._player.group_id is None: + return None + if not (group := self.coordinator.heos.groups.get(self._player.group_id)): + return None + player_ids = [group.lead_player_id, *group.member_player_ids] + # Resolve player_ids to entity_ids + entity_registry = er.async_get(self.hass) + entity_ids = [ + entity_id + for member_id in 
player_ids + if ( + entity_id := entity_registry.async_get_entity_id( + Platform.MEDIA_PLAYER, HEOS_DOMAIN, str(member_id) + ) + ) + ] + return entity_ids or None + + @callback + def _update_attributes(self) -> None: + """Update core attributes of the media player.""" + self._attr_group_members = self._get_group_members() + self._attr_source_list = self.coordinator.async_get_source_list() + self._attr_source = self.coordinator.async_get_current_source( + self._player.now_playing_media + ) + self._attr_repeat = HEOS_HA_REPEAT_TYPE_MAP[self._player.repeat] + controls = self._player.now_playing_media.supported_controls + current_support = [CONTROL_TO_SUPPORT[control] for control in controls] + self._attr_supported_features = reduce( + ior, current_support, BASE_SUPPORTED_FEATURES + ) + if self.support_next_track and self.support_previous_track: + self._attr_supported_features |= ( + MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.SHUFFLE_SET + ) async def async_added_to_hass(self) -> None: """Device added to hass.""" # Update state when attributes of the player change + self._update_attributes() self.async_on_remove(self._player.add_on_player_event(self._player_update)) - # Update state when heos changes - self.async_on_remove( - async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated) - ) - # Register this player's entity_id so it can be resolved by the group manager - self.async_on_remove( - self._group_manager.register_media_player( - self._player.player_id, self.entity_id - ) - ) - async_dispatcher_send(self.hass, SIGNAL_HEOS_PLAYER_ADDED) + await super().async_added_to_hass() @catch_action_error("clear playlist") async def async_clear_playlist(self) -> None: """Clear players playlist.""" await self._player.clear_queue() - @catch_action_error("join players") - async def async_join_players(self, group_members: list[str]) -> None: - """Join `group_members` as a player group with the current player.""" - await self._group_manager.async_join_players( - self._player.player_id, group_members - ) - @catch_action_error("pause") async def async_media_pause(self) -> None: """Send pause command.""" @@ -273,14 +295,7 @@ class HeosMediaPlayer(MediaPlayerEntity): index = int(media_id) except ValueError: # Try finding index by name - index = next( - ( - index - for index, favorite in self._source_manager.favorites.items() - if favorite.name == media_id - ), - None, - ) + index = self.coordinator.async_get_favorite_index(media_id) if index is None: raise ValueError(f"Invalid favorite '{media_id}'") await self._player.play_preset_station(index) @@ -291,7 +306,14 @@ class HeosMediaPlayer(MediaPlayerEntity): @catch_action_error("select source") async def async_select_source(self, source: str) -> None: """Select input source.""" - await self._source_manager.play_source(source, self._player) + await self.coordinator.async_play_source(source, self._player) + + @catch_action_error("set repeat") + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat mode.""" + await self._player.set_play_mode( + HA_HEOS_REPEAT_TYPE_MAP[repeat], self._player.shuffle + ) @catch_action_error("set shuffle") async def async_set_shuffle(self, shuffle: bool) -> None: @@ -303,18 +325,45 @@ class HeosMediaPlayer(MediaPlayerEntity): """Set volume level, range 0..1.""" await self._player.set_volume(int(volume * 100)) - async def async_update(self) -> None: - """Update supported features of the player.""" - controls = self._player.now_playing_media.supported_controls - 
current_support = [CONTROL_TO_SUPPORT[control] for control in controls] - self._attr_supported_features = reduce( - ior, current_support, BASE_SUPPORTED_FEATURES - ) + @catch_action_error("join players") + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + player_ids: list[int] = [self._player.player_id] + # Resolve entity_ids to player_ids + entity_registry = er.async_get(self.hass) + for entity_id in group_members: + entity_entry = entity_registry.async_get(entity_id) + if entity_entry is None: + raise ServiceValidationError( + translation_domain=HEOS_DOMAIN, + translation_key="entity_not_found", + translation_placeholders={"entity_id": entity_id}, + ) + if entity_entry.platform != HEOS_DOMAIN: + raise ServiceValidationError( + translation_domain=HEOS_DOMAIN, + translation_key="not_heos_media_player", + translation_placeholders={"entity_id": entity_id}, + ) + player_id = int(entity_entry.unique_id) + if player_id not in player_ids: + player_ids.append(player_id) + await self.coordinator.heos.set_group(player_ids) @catch_action_error("unjoin player") async def async_unjoin_player(self) -> None: """Remove this player from any group.""" - await self._group_manager.async_unjoin_player(self._player.player_id) + for group in self.coordinator.heos.groups.values(): + if group.lead_player_id == self._player.player_id: + # Player is the group leader, this effectively removes the group. + await self.coordinator.heos.set_group([self._player.player_id]) + return + if self._player.player_id in group.member_player_ids: + # Player is a group member, update the group to exclude it + new_members = [group.lead_player_id, *group.member_player_ids] + new_members.remove(self._player.player_id) + await self.coordinator.heos.set_group(new_members) + return @property def available(self) -> bool: @@ -332,11 +381,6 @@ class HeosMediaPlayer(MediaPlayerEntity): "media_type": self._player.now_playing_media.type, } - @property - def group_members(self) -> list[str]: - """List of players which are grouped together.""" - return self._group_manager.group_membership.get(self.entity_id, []) - @property def is_volume_muted(self) -> bool: """Boolean if volume is currently muted.""" @@ -398,16 +442,6 @@ class HeosMediaPlayer(MediaPlayerEntity): """Boolean if shuffle is enabled.""" return self._player.shuffle - @property - def source(self) -> str: - """Name of the current input source.""" - return self._source_manager.get_current_source(self._player.now_playing_media) - - @property - def source_list(self) -> list[str]: - """List of available input sources.""" - return self._source_manager.source_list - @property def state(self) -> MediaPlayerState: """State of the player.""" diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml index 81162ab9b97..d48bcc492cd 100644 --- a/homeassistant/components/heos/quality_scale.yaml +++ b/homeassistant/components/heos/quality_scale.yaml @@ -5,7 +5,7 @@ rules: status: done comment: Integration is a local push integration brands: done - common-modules: todo + common-modules: done config-flow-test-coverage: done config-flow: status: done @@ -29,10 +29,7 @@ rules: docs-installation-parameters: done entity-unavailable: done integration-owner: done - log-when-unavailable: - status: todo - comment: | - The integration currently spams the logs until reconnected + log-when-unavailable: done parallel-updates: done reauthentication-flow: done 
test-coverage: diff --git a/homeassistant/components/heos/services.py b/homeassistant/components/heos/services.py index a780c26fca6..5a0105f830e 100644 --- a/homeassistant/components/heos/services.py +++ b/homeassistant/components/heos/services.py @@ -64,7 +64,7 @@ def _get_controller(hass: HomeAssistant) -> Heos: raise HomeAssistantError( translation_domain=DOMAIN, translation_key="integration_not_loaded" ) - return entry.runtime_data.controller_manager.controller + return entry.runtime_data.heos async def _sign_in_handler(service: ServiceCall) -> None: diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index e99d8f7e7fb..907804d10e1 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -94,9 +94,15 @@ "action_error": { "message": "Unable to {action}: {error}" }, + "entity_not_found": { + "message": "Entity {entity_id} was not found" + }, "integration_not_loaded": { "message": "The HEOS integration is not loaded" }, + "not_heos_media_player": { + "message": "Entity {entity_id} is not a HEOS media player entity" + }, "unknown_source": { "message": "Unknown source: {source}" } diff --git a/homeassistant/components/holiday/config_flow.py b/homeassistant/components/holiday/config_flow.py index 6d29e09c0f8..538d9971109 100644 --- a/homeassistant/components/holiday/config_flow.py +++ b/homeassistant/components/holiday/config_flow.py @@ -89,6 +89,19 @@ def get_options_schema(country: str) -> vol.Schema: return vol.Schema(schema) +def get_entry_name(language: str, country: str, province: str | None) -> str: + """Generate the entity name from the user language and location.""" + try: + locale = Locale.parse(language, sep="-") + except UnknownLocaleError: + # Default to (US) English if language not recognized by babel + # Mainly an issue with English flavors such as "en-GB" + locale = Locale("en") + country_str = locale.territories[country] # blocking I/O + province_str = f", {province}" if province else "" + return f"{country_str}{province_str}" + + class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Holiday.""" @@ -159,15 +172,9 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({**data, **(options or {})}) - try: - locale = Locale.parse(self.hass.config.language, sep="-") - except UnknownLocaleError: - # Default to (US) English if language not recognized by babel - # Mainly an issue with English flavors such as "en-GB" - locale = Locale("en") - province_str = f", {province}" if province else "" - name = f"{locale.territories[country]}{province_str}" - + name = await self.hass.async_add_executor_job( + get_entry_name, self.hass.config.language, country, province + ) return self.async_create_entry(title=name, data=data, options=options) options_schema = await self.hass.async_add_executor_job( @@ -196,14 +203,9 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({**data, **(options or {})}) - try: - locale = Locale.parse(self.hass.config.language, sep="-") - except UnknownLocaleError: - # Default to (US) English if language not recognized by babel - # Mainly an issue with English flavors such as "en-GB" - locale = Locale("en") - province_str = f", {province}" if province else "" - name = f"{locale.territories[country]}{province_str}" + name = await self.hass.async_add_executor_job( + get_entry_name, self.hass.config.language, country, province + ) if options: return 
self.async_update_reload_and_abort( diff --git a/homeassistant/components/homeassistant/triggers/time.py b/homeassistant/components/homeassistant/triggers/time.py index bea6e8a66a7..5cd1921d8a8 100644 --- a/homeassistant/components/homeassistant/triggers/time.py +++ b/homeassistant/components/homeassistant/triggers/time.py @@ -42,7 +42,7 @@ _TIME_AT_SCHEMA = vol.Any(cv.time, _TIME_TRIGGER_ENTITY) _TIME_TRIGGER_ENTITY_WITH_OFFSET = vol.Schema( { - vol.Required(CONF_ENTITY_ID): cv.entity_domain(["sensor"]), + vol.Required(CONF_ENTITY_ID): cv.entity_domain(["input_datetime", "sensor"]), vol.Optional(CONF_OFFSET): cv.time_period, } ) @@ -156,14 +156,17 @@ async def async_attach_trigger( if has_date: # If input_datetime has date, then track point in time. - trigger_dt = datetime( - year, - month, - day, - hour, - minute, - second, - tzinfo=dt_util.get_default_time_zone(), + trigger_dt = ( + datetime( + year, + month, + day, + hour, + minute, + second, + tzinfo=dt_util.get_default_time_zone(), + ) + + offset ) # Only set up listener if time is now or in the future. if trigger_dt >= dt_util.now(): @@ -178,6 +181,17 @@ async def async_attach_trigger( ) elif has_time: # Else if it has time, then track time change. + if offset != timedelta(0): + # Create a temporary datetime object to get an offset. + temp_dt = dt_util.now().replace( + hour=hour, minute=minute, second=second, microsecond=0 + ) + temp_dt += offset + # Ignore the date and apply the offset even if it wraps + # around to the next day. + hour = temp_dt.hour + minute = temp_dt.minute + second = temp_dt.second remove = async_track_time_change( hass, partial( diff --git a/homeassistant/components/homee/const.py b/homeassistant/components/homee/const.py index 8595f042af8..d1d5be97ef7 100644 --- a/homeassistant/components/homee/const.py +++ b/homeassistant/components/homee/const.py @@ -3,6 +3,7 @@ from homeassistant.const import ( LIGHT_LUX, PERCENTAGE, + REVOLUTIONS_PER_MINUTE, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -24,6 +25,7 @@ HOMEE_UNIT_TO_HA_UNIT = { "%": PERCENTAGE, "lx": LIGHT_LUX, "klx": LIGHT_LUX, + "1/min": REVOLUTIONS_PER_MINUTE, "A": UnitOfElectricCurrent.AMPERE, "V": UnitOfElectricPotential.VOLT, "kWh": UnitOfEnergy.KILO_WATT_HOUR, diff --git a/homeassistant/components/homee/cover.py b/homeassistant/components/homee/cover.py index b594b23cc59..b4a853f7c35 100644 --- a/homeassistant/components/homee/cover.py +++ b/homeassistant/components/homee/cover.py @@ -121,14 +121,15 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): def current_cover_position(self) -> int | None: """Return the cover's position.""" # Translate the homee position values to HA's 0-100 scale - if self.has_attribute(AttributeType.POSITION): - attribute = self._node.get_attribute_by_type(AttributeType.POSITION) + if ( + attribute := self._node.get_attribute_by_type(AttributeType.POSITION) + ) is not None: homee_min = attribute.minimum homee_max = attribute.maximum homee_position = attribute.current_value position = ((homee_position - homee_min) / (homee_max - homee_min)) * 100 - return 100 - position + return int(100 - position) return None @@ -136,16 +137,17 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): def current_cover_tilt_position(self) -> int | None: """Return the cover's tilt position.""" # Translate the homee position values to HA's 0-100 scale - if self.has_attribute(AttributeType.SHUTTER_SLAT_POSITION): - attribute = self._node.get_attribute_by_type( + if ( + attribute := self._node.get_attribute_by_type( 
AttributeType.SHUTTER_SLAT_POSITION ) + ) is not None: homee_min = attribute.minimum homee_max = attribute.maximum homee_position = attribute.current_value position = ((homee_position - homee_min) / (homee_max - homee_min)) * 100 - return 100 - position + return int(100 - position) return None @@ -176,8 +178,9 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): @property def is_closed(self) -> bool | None: """Return if the cover is closed.""" - if self.has_attribute(AttributeType.POSITION): - attribute = self._node.get_attribute_by_type(AttributeType.POSITION) + if ( + attribute := self._node.get_attribute_by_type(AttributeType.POSITION) + ) is not None: return attribute.get_value() == attribute.maximum if self._open_close_attribute is not None: @@ -187,10 +190,11 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): return self._open_close_attribute.get_value() == 0 # If none of the above is present, it might be a slat only cover. - if self.has_attribute(AttributeType.SHUTTER_SLAT_POSITION): - attribute = self._node.get_attribute_by_type( + if ( + attribute := self._node.get_attribute_by_type( AttributeType.SHUTTER_SLAT_POSITION ) + ) is not None: return attribute.get_value() == attribute.minimum return None @@ -217,12 +221,14 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): position = 100 - cast(int, kwargs[ATTR_POSITION]) # Convert position to range of our entity. - attribute = self._node.get_attribute_by_type(AttributeType.POSITION) - homee_min = attribute.minimum - homee_max = attribute.maximum - homee_position = (position / 100) * (homee_max - homee_min) + homee_min + if ( + attribute := self._node.get_attribute_by_type(AttributeType.POSITION) + ) is not None: + homee_min = attribute.minimum + homee_max = attribute.maximum + homee_position = (position / 100) * (homee_max - homee_min) + homee_min - await self.async_set_value(attribute, homee_position) + await self.async_set_value(attribute, homee_position) async def async_stop_cover(self, **kwargs: Any) -> None: """Stop the cover.""" @@ -231,23 +237,27 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): async def async_open_cover_tilt(self, **kwargs: Any) -> None: """Open the cover tilt.""" - slat_attribute = self._node.get_attribute_by_type( - AttributeType.SLAT_ROTATION_IMPULSE - ) - if not slat_attribute.is_reversed: - await self.async_set_value(slat_attribute, 2) - else: - await self.async_set_value(slat_attribute, 1) + if ( + slat_attribute := self._node.get_attribute_by_type( + AttributeType.SLAT_ROTATION_IMPULSE + ) + ) is not None: + if not slat_attribute.is_reversed: + await self.async_set_value(slat_attribute, 2) + else: + await self.async_set_value(slat_attribute, 1) async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Close the cover tilt.""" - slat_attribute = self._node.get_attribute_by_type( - AttributeType.SLAT_ROTATION_IMPULSE - ) - if not slat_attribute.is_reversed: - await self.async_set_value(slat_attribute, 1) - else: - await self.async_set_value(slat_attribute, 2) + if ( + slat_attribute := self._node.get_attribute_by_type( + AttributeType.SLAT_ROTATION_IMPULSE + ) + ) is not None: + if not slat_attribute.is_reversed: + await self.async_set_value(slat_attribute, 1) + else: + await self.async_set_value(slat_attribute, 2) async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" @@ -255,11 +265,13 @@ class HomeeCover(HomeeNodeEntity, CoverEntity): position = 100 - cast(int, kwargs[ATTR_TILT_POSITION]) # Convert position to range of our entity. 
- attribute = self._node.get_attribute_by_type( - AttributeType.SHUTTER_SLAT_POSITION - ) - homee_min = attribute.minimum - homee_max = attribute.maximum - homee_position = (position / 100) * (homee_max - homee_min) + homee_min + if ( + attribute := self._node.get_attribute_by_type( + AttributeType.SHUTTER_SLAT_POSITION + ) + ) is not None: + homee_min = attribute.minimum + homee_max = attribute.maximum + homee_position = (position / 100) * (homee_max - homee_min) + homee_min - await self.async_set_value(attribute, homee_position) + await self.async_set_value(attribute, homee_position) diff --git a/homeassistant/components/homee/entity.py b/homeassistant/components/homee/entity.py index 2af01358752..a6cd54354bf 100644 --- a/homeassistant/components/homee/entity.py +++ b/homeassistant/components/homee/entity.py @@ -73,13 +73,20 @@ class HomeeNodeEntity(Entity): self._attr_unique_id = f"{entry.unique_id}-{node.id}" self._entry = entry - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(node.id))}, - name=node.name, - model=get_name_for_enum(NodeProfile, node.profile), - sw_version=self._get_software_version(), - via_device=(DOMAIN, entry.runtime_data.settings.uid), - ) + ## Homee hub itself has node-id -1 + if node.id == -1: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entry.runtime_data.settings.uid)}, + ) + else: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{entry.unique_id}-{node.id}")}, + name=node.name, + model=get_name_for_enum(NodeProfile, node.profile), + sw_version=self._get_software_version(), + via_device=(DOMAIN, entry.runtime_data.settings.uid), + ) + self._host_connected = entry.runtime_data.connected async def async_added_to_hass(self) -> None: @@ -91,23 +98,6 @@ class HomeeNodeEntity(Entity): ) ) - @property - def device_info(self) -> DeviceInfo: - """Return the device info.""" - # Homee hub has id -1, but is identified only by the UID. 
- if self._node.id == -1: - return DeviceInfo( - identifiers={(DOMAIN, self._entry.runtime_data.settings.uid)}, - ) - - return DeviceInfo( - identifiers={(DOMAIN, f"{self._entry.unique_id}-{self._node.id}")}, - name=self._node.name, - model=get_name_for_enum(NodeProfile, self._node.profile), - sw_version=self._get_software_version(), - via_device=(DOMAIN, self._entry.runtime_data.settings.uid), - ) - @property def available(self) -> bool: """Return the availability of the underlying node.""" @@ -122,18 +112,26 @@ class HomeeNodeEntity(Entity): def _get_software_version(self) -> str | None: """Return the software version of the node.""" - if self.has_attribute(AttributeType.FIRMWARE_REVISION): - return self._node.get_attribute_by_type( + if ( + attribute := self._node.get_attribute_by_type( AttributeType.FIRMWARE_REVISION - ).get_value() - if self.has_attribute(AttributeType.SOFTWARE_REVISION): - return self._node.get_attribute_by_type( + ) + ) is not None: + return str(attribute.get_value()) + if ( + attribute := self._node.get_attribute_by_type( AttributeType.SOFTWARE_REVISION - ).get_value() + ) + ) is not None: + return str(attribute.get_value()) + return None def has_attribute(self, attribute_type: AttributeType) -> bool: """Check if an attribute of the given type exists.""" + if self._node.attribute_map is None: + return False + return attribute_type in self._node.attribute_map async def async_set_value(self, attribute: HomeeAttribute, value: float) -> None: diff --git a/homeassistant/components/homee/manifest.json b/homeassistant/components/homee/manifest.json index 5869a9760ea..6d03547efc9 100644 --- a/homeassistant/components/homee/manifest.json +++ b/homeassistant/components/homee/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["homee"], "quality_scale": "bronze", - "requirements": ["pyHomee==1.2.0"] + "requirements": ["pyHomee==1.2.3"] } diff --git a/homeassistant/components/homee/quality_scale.yaml b/homeassistant/components/homee/quality_scale.yaml index 96d4678b420..ff99d177018 100644 --- a/homeassistant/components/homee/quality_scale.yaml +++ b/homeassistant/components/homee/quality_scale.yaml @@ -52,12 +52,12 @@ rules: docs-troubleshooting: todo docs-use-cases: todo dynamic-devices: todo - entity-category: todo - entity-device-class: todo - entity-disabled-by-default: todo - entity-translations: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done exception-translations: todo - icon-translations: todo + icon-translations: done reconfiguration-flow: todo repair-issues: todo stale-devices: todo @@ -65,4 +65,4 @@ rules: # Platinum async-dependency: todo inject-websession: todo - strict-typing: todo + strict-typing: done diff --git a/homeassistant/components/homee/sensor.py b/homeassistant/components/homee/sensor.py index 75b11811460..9b8fb0f6fe1 100644 --- a/homeassistant/components/homee/sensor.py +++ b/homeassistant/components/homee/sensor.py @@ -99,9 +99,29 @@ SENSOR_DESCRIPTIONS: dict[AttributeType, HomeeSensorEntityDescription] = { device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), + AttributeType.EXHAUST_MOTOR_REVS: HomeeSensorEntityDescription( + key="exhaust_motor_revs", + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.INDOOR_RELATIVE_HUMIDITY: HomeeSensorEntityDescription( + key="indoor_humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + 
), + AttributeType.INDOOR_TEMPERATURE: HomeeSensorEntityDescription( + key="indoor_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + AttributeType.INTAKE_MOTOR_REVS: HomeeSensorEntityDescription( + key="intake_motor_revs", + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + ), AttributeType.LEVEL: HomeeSensorEntityDescription( key="level", - device_class=SensorDeviceClass.VOLUME, + device_class=SensorDeviceClass.VOLUME_STORAGE, state_class=SensorStateClass.MEASUREMENT, ), AttributeType.LINK_QUALITY: HomeeSensorEntityDescription( @@ -109,6 +129,22 @@ SENSOR_DESCRIPTIONS: dict[AttributeType, HomeeSensorEntityDescription] = { entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), + AttributeType.OPERATING_HOURS: HomeeSensorEntityDescription( + key="operating_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.OUTDOOR_RELATIVE_HUMIDITY: HomeeSensorEntityDescription( + key="outdoor_humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + AttributeType.OUTDOOR_TEMPERATURE: HomeeSensorEntityDescription( + key="outdoor_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), AttributeType.POSITION: HomeeSensorEntityDescription( key="position", state_class=SensorStateClass.MEASUREMENT, @@ -216,6 +252,7 @@ NODE_SENSOR_DESCRIPTIONS: tuple[HomeeNodeSensorEntityDescription, ...] = ( ], entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, + translation_key="node_sensor_state", value_fn=lambda node: get_name_for_enum(NodeState, node.state), ), ) @@ -263,7 +300,7 @@ class HomeeSensor(HomeeEntity, SensorEntity): self.entity_description = description self._attr_translation_key = description.key if attribute.instance > 0: - self._attr_translation_key = f"{description.translation_key}_instance" + self._attr_translation_key = f"{self._attr_translation_key}_instance" self._attr_translation_placeholders = {"instance": str(attribute.instance)} @property @@ -293,7 +330,6 @@ class HomeeNodeSensor(HomeeNodeEntity, SensorEntity): """Initialize a homee node sensor entity.""" super().__init__(node, entry) self.entity_description = description - self._attr_translation_key = f"node_{description.key}" self._node = node self._attr_unique_id = f"{self._attr_unique_id}-{description.key}" diff --git a/homeassistant/components/homee/strings.json b/homeassistant/components/homee/strings.json index a657465126b..401996622f2 100644 --- a/homeassistant/components/homee/strings.json +++ b/homeassistant/components/homee/strings.json @@ -42,6 +42,24 @@ "energy_instance": { "name": "Energy {instance}" }, + "exhaust_motor_revs": { + "name": "Exhaust motor speed" + }, + "indoor_humidity": { + "name": "Indoor humidity" + }, + "indoor_humidity_instance": { + "name": "Indoor humidity {instance}" + }, + "indoor_temperature": { + "name": "Indoor temperature" + }, + "indoor_temperature_instance": { + "name": "Indoor temperature {instance}" + }, + "intake_motor_revs": { + "name": "Intake motor speed" + }, "level": { "name": "Level" }, @@ -51,6 +69,21 @@ "node_state": { "name": "Node state" }, + "operating_hours": { + "name": "Operating hours" + }, + "outdoor_humidity": { + "name": "Outdoor humidity" + }, + "outdoor_humidity_instance": { + "name": "Outdoor humidity {instance}" 
+ }, + "outdoor_temperature": { + "name": "Outdoor temperature" + }, + "outdoor_temperature_instance": { + "name": "Outdoor temperature {instance}" + }, "position": { "name": "Position" }, diff --git a/homeassistant/components/honeywell/manifest.json b/homeassistant/components/honeywell/manifest.json index 4a50e326965..36a4f497601 100644 --- a/homeassistant/components/honeywell/manifest.json +++ b/homeassistant/components/honeywell/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/honeywell", "iot_class": "cloud_polling", "loggers": ["somecomfort"], - "requirements": ["AIOSomecomfort==0.0.28"] + "requirements": ["AIOSomecomfort==0.0.30"] } diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index 2921b5ca68e..a587b4f3821 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -9,10 +9,10 @@ import logging from typing import TYPE_CHECKING from aioautomower.exceptions import ( - ApiException, - AuthException, + ApiError, + AuthError, + HusqvarnaTimeoutError, HusqvarnaWSServerHandshakeError, - TimeoutException, ) from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession @@ -64,9 +64,9 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib self.ws_connected = True try: data = await self.api.get_status() - except ApiException as err: + except ApiError as err: raise UpdateFailed(err) from err - except AuthException as err: + except AuthError as err: raise ConfigEntryAuthFailed(err) from err self._async_add_remove_devices(data) @@ -100,7 +100,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib "Failed to connect to websocket. Trying to reconnect: %s", err, ) - except TimeoutException as err: + except HusqvarnaTimeoutError as err: _LOGGER.debug( "Failed to listen to websocket. 
Trying to reconnect: %s", err, diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index 5b5156e5f1d..150a3d18d87 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ b/homeassistant/components/husqvarna_automower/entity.py @@ -8,7 +8,7 @@ import functools import logging from typing import TYPE_CHECKING, Any, Concatenate -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea from homeassistant.core import callback @@ -67,7 +67,7 @@ def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P]( async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None: try: await func(self, *args, **kwargs) - except ApiException as exception: + except ApiError as exception: raise HomeAssistantError( translation_domain=DOMAIN, translation_key="command_send_failed", diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 1eed2be4575..0eabf5ec0d6 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_push", "loggers": ["aioautomower"], "quality_scale": "silver", - "requirements": ["aioautomower==2025.1.0"] + "requirements": ["aioautomower==2025.1.1"] } diff --git a/homeassistant/components/husqvarna_automower/switch.py b/homeassistant/components/husqvarna_automower/switch.py index b8004e17066..d55d51b42fe 100644 --- a/homeassistant/components/husqvarna_automower/switch.py +++ b/homeassistant/components/husqvarna_automower/switch.py @@ -165,14 +165,14 @@ class StayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.coordinator.api.commands.switch_stay_out_zone( - self.mower_id, self.stay_out_zone_uid, False + self.mower_id, self.stay_out_zone_uid, switch=False ) @handle_sending_exception(poll_after_sending=True) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" await self.coordinator.api.commands.switch_stay_out_zone( - self.mower_id, self.stay_out_zone_uid, True + self.mower_id, self.stay_out_zone_uid, switch=True ) diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 34c31d3ad16..83e8a8325f9 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -68,7 +68,7 @@ ZONE_BINARY_SENSORS: tuple[HydrawiseBinarySensorEntityDescription, ...] 
= ( ) SCHEMA_START_WATERING: VolDictType = { - vol.Optional("duration"): vol.All(vol.Coerce(int), vol.Range(min=0, max=90)), + vol.Optional("duration"): vol.All(vol.Coerce(int), vol.Range(min=0, max=1440)), } SCHEMA_SUSPEND: VolDictType = { vol.Required("until"): cv.datetime, diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 5af32af3951..ed21e96cd0b 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable, Mapping +from collections.abc import Mapping from typing import Any from aiohttp import ClientError @@ -10,85 +10,104 @@ from pydrawise import auth as pydrawise_auth, client from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import APP_ID, DOMAIN, LOGGER +STEP_USER_DATA_SCHEMA = vol.Schema( + {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} +) +STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) + class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Hydrawise.""" VERSION = 1 - async def _create_or_update_entry( - self, - username: str, - password: str, - *, - on_failure: Callable[[str], ConfigFlowResult], - ) -> ConfigFlowResult: - """Create the config entry.""" - # Verify that the provided credentials work.""" - auth = pydrawise_auth.Auth(username, password) - try: - await auth.token() - except NotAuthorizedError: - return on_failure("invalid_auth") - except TimeoutError: - return on_failure("timeout_connect") - - try: - api = client.Hydrawise(auth, app_id=APP_ID) - # Don't fetch zones because we don't need them yet. 
- user = await api.get_user(fetch_zones=False) - except TimeoutError: - return on_failure("timeout_connect") - except ClientError as ex: - LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) - return on_failure("cannot_connect") - - await self.async_set_unique_id(f"hydrawise-{user.customer_id}") - - if self.source != SOURCE_REAUTH: - self._abort_if_unique_id_configured() - return self.async_create_entry( - title="Hydrawise", - data={CONF_USERNAME: username, CONF_PASSWORD: password}, - ) - - return self.async_update_reload_and_abort( - self._get_reauth_entry(), - data_updates={CONF_USERNAME: username, CONF_PASSWORD: password}, - ) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial setup.""" - if user_input is not None: - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] + if user_input is None: + return self._show_user_form({}) + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + unique_id, errors = await _authenticate(username, password) + if errors: + return self._show_user_form(errors) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=username, + data={CONF_USERNAME: username, CONF_PASSWORD: password}, + ) - return await self._create_or_update_entry( - username=username, password=password, on_failure=self._show_form - ) - return self._show_form() - - def _show_form(self, error_type: str | None = None) -> ConfigFlowResult: - errors = {} - if error_type is not None: - errors["base"] = error_type + def _show_user_form(self, errors: dict[str, str]) -> ConfigFlowResult: return self.async_show_form( - step_id="user", - data_schema=vol.Schema( - {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} - ), - errors=errors, + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: - """Perform reauth after updating config to username/password.""" - return await self.async_step_user() + """Handle reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + if user_input is None: + return self._show_reauth_form({}) + + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + user_id, errors = await _authenticate(username, password) + if user_id is None: + return self._show_reauth_form(errors) + + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + reauth_entry, data={CONF_USERNAME: username, CONF_PASSWORD: password} + ) + + def _show_reauth_form(self, errors: dict[str, str]) -> ConfigFlowResult: + return self.async_show_form( + step_id="reauth_confirm", data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors + ) + + +async def _authenticate( + username: str, password: str +) -> tuple[str | None, dict[str, str]]: + """Authenticate with the Hydrawise API.""" + unique_id = None + errors: dict[str, str] = {} + auth = pydrawise_auth.Auth(username, password) + try: + await auth.token() + except NotAuthorizedError: + errors["base"] = "invalid_auth" + except TimeoutError: + errors["base"] = "timeout_connect" + + if 
errors: + return unique_id, errors + + try: + api = client.Hydrawise(auth, app_id=APP_ID) + # Don't fetch zones because we don't need them yet. + user = await api.get_user(fetch_zones=False) + except TimeoutError: + errors["base"] = "timeout_connect" + except ClientError as ex: + LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) + errors["base"] = "cannot_connect" + else: + unique_id = f"hydrawise-{user.customer_id}" + + return unique_id, errors diff --git a/homeassistant/components/hydrawise/services.yaml b/homeassistant/components/hydrawise/services.yaml index 64c04901816..bf90a8e23b3 100644 --- a/homeassistant/components/hydrawise/services.yaml +++ b/homeassistant/components/hydrawise/services.yaml @@ -10,7 +10,7 @@ start_watering: selector: number: min: 0 - max: 90 + max: 1440 unit_of_measurement: min mode: box suspend: diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json index 4d50f10bcb2..74c63cbe758 100644 --- a/homeassistant/components/hydrawise/strings.json +++ b/homeassistant/components/hydrawise/strings.json @@ -8,6 +8,13 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Hydrawise integration needs to re-authenticate your account", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { diff --git a/homeassistant/components/ibeacon/manifest.json b/homeassistant/components/ibeacon/manifest.json index 8bd7e3ab9cc..bdbdaea49d2 100644 --- a/homeassistant/components/ibeacon/manifest.json +++ b/homeassistant/components/ibeacon/manifest.json @@ -7,7 +7,7 @@ "manufacturer_data_start": [2, 21] } ], - "codeowners": ["@bdraco"], + "codeowners": [], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/ibeacon", diff --git a/homeassistant/components/igloohome/manifest.json b/homeassistant/components/igloohome/manifest.json index 28e287db2ab..35c58479d75 100644 --- a/homeassistant/components/igloohome/manifest.json +++ b/homeassistant/components/igloohome/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/igloohome", "iot_class": "cloud_polling", "quality_scale": "bronze", - "requirements": ["igloohome-api==0.0.6"] + "requirements": ["igloohome-api==0.1.0"] } diff --git a/homeassistant/components/incomfort/__init__.py b/homeassistant/components/incomfort/__init__.py index e6775f5baca..249a0ae9085 100644 --- a/homeassistant/components/incomfort/__init__.py +++ b/homeassistant/components/incomfort/__init__.py @@ -3,15 +3,17 @@ from __future__ import annotations from aiohttp import ClientResponseError -from incomfortclient import IncomfortError, InvalidHeaterList +from incomfortclient import InvalidGateway, InvalidHeaterList from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr +from .const import DOMAIN from .coordinator import InComfortDataCoordinator, async_connect_gateway -from .errors import InConfortTimeout, InConfortUnknownError, NoHeaters, NotFound +from .errors import InComfortTimeout, InComfortUnknownError, NoHeaters, NotFound PLATFORMS = ( Platform.WATER_HEATER, @@ -33,17 +35,27 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: InComfortConfigEntry) -> await heater.update() except InvalidHeaterList as exc: raise NoHeaters from exc - except IncomfortError as exc: - if isinstance(exc.message, ClientResponseError): - if exc.message.status == 401: - raise ConfigEntryAuthFailed("Incorrect credentials") from exc - if exc.message.status == 404: - raise NotFound from exc - raise InConfortUnknownError from exc + except InvalidGateway as exc: + raise ConfigEntryAuthFailed("Incorrect credentials") from exc + except ClientResponseError as exc: + if exc.status == 404: + raise NotFound from exc + raise InComfortUnknownError from exc except TimeoutError as exc: - raise InConfortTimeout from exc + raise InComfortTimeout from exc - coordinator = InComfortDataCoordinator(hass, data) + # Register discovered gateway device + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.entry_id)}, + connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} + if entry.unique_id is not None + else set(), + manufacturer="Intergas", + name="RFGateway", + ) + coordinator = InComfortDataCoordinator(hass, data, entry.entry_id) entry.runtime_data = coordinator await coordinator.async_config_entry_first_refresh() @@ -51,6 +63,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: InComfortConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: InComfortConfigEntry) -> bool: """Unload config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/incomfort/binary_sensor.py b/homeassistant/components/incomfort/binary_sensor.py index c4a23946bb2..e4353e457a5 100644 --- a/homeassistant/components/incomfort/binary_sensor.py +++ b/homeassistant/components/incomfort/binary_sensor.py @@ -102,7 +102,7 @@ class IncomfortBinarySensor(IncomfortBoilerEntity, BinarySensorEntity): @property def is_on(self) -> bool: """Return the status of the sensor.""" - return self._heater.status[self.entity_description.value_key] + return bool(self._heater.status[self.entity_description.value_key]) @property def extra_state_attributes(self) -> dict[str, Any] | None: diff --git a/homeassistant/components/incomfort/climate.py b/homeassistant/components/incomfort/climate.py index 756e14fc545..f814b1fb1f5 100644 --- a/homeassistant/components/incomfort/climate.py +++ b/homeassistant/components/incomfort/climate.py @@ -73,6 +73,8 @@ class InComfortClimate(IncomfortEntity, ClimateEntity): manufacturer="Intergas", name=f"Thermostat {room.room_no}", ) + if coordinator.unique_id: + self._attr_device_info["via_device"] = (DOMAIN, coordinator.unique_id) @property def extra_state_attributes(self) -> dict[str, Any]: @@ -106,7 +108,7 @@ class InComfortClimate(IncomfortEntity, ClimateEntity): async def async_set_temperature(self, **kwargs: Any) -> None: """Set a new target temperature for this zone.""" - temperature = kwargs.get(ATTR_TEMPERATURE) + temperature: float = kwargs[ATTR_TEMPERATURE] await self._room.set_override(temperature) await self.coordinator.async_refresh() diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index 3db8e40f9f4..8e4a5f72619 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -5,19 +5,20 @@ from __future__ import 
annotations from collections.abc import Mapping from typing import Any -from aiohttp import ClientResponseError -from incomfortclient import IncomfortError, InvalidHeaterList +from incomfortclient import InvalidGateway, InvalidHeaterList import voluptuous as vol from homeassistant.config_entries import ( SOURCE_RECONFIGURE, - ConfigEntry, + ConfigEntryState, ConfigFlow, ConfigFlowResult, OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.selector import ( BooleanSelector, BooleanSelectorConfig, @@ -25,7 +26,9 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo +from . import InComfortConfigEntry from .const import CONF_LEGACY_SETPOINT_STATUS, DOMAIN from .coordinator import async_connect_gateway @@ -45,6 +48,17 @@ CONFIG_SCHEMA = vol.Schema( } ) +DHCP_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_USERNAME): TextSelector( + TextSelectorConfig(type=TextSelectorType.TEXT, autocomplete="admin") + ), + vol.Optional(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } +) + REAUTH_SCHEMA = vol.Schema( { vol.Optional(CONF_PASSWORD): TextSelector( @@ -62,11 +76,6 @@ OPTIONS_SCHEMA = vol.Schema( } ) -ERROR_STATUS_MAPPING: dict[int, tuple[str, str]] = { - 401: (CONF_PASSWORD, "auth_error"), - 404: ("base", "not_found"), -} - async def async_try_connect_gateway( hass: HomeAssistant, config: dict[str, Any] @@ -74,15 +83,10 @@ async def async_try_connect_gateway( """Try to connect to the Lan2RF gateway.""" try: await async_connect_gateway(hass, config) + except InvalidGateway: + return {"base": "auth_error"} except InvalidHeaterList: return {"base": "no_heaters"} - except IncomfortError as exc: - if isinstance(exc.message, ClientResponseError): - scope, error = ERROR_STATUS_MAPPING.get( - exc.message.status, ("base", "unknown") - ) - return {scope: error} - return {"base": "unknown"} except TimeoutError: return {"base": "timeout_error"} except Exception: # noqa: BLE001 @@ -94,14 +98,77 @@ async def async_try_connect_gateway( class InComfortConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow to set up an Intergas InComfort boyler and thermostats.""" + _discovered_host: str + @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: InComfortConfigEntry, ) -> InComfortOptionsFlowHandler: """Get the options flow for this handler.""" return InComfortOptionsFlowHandler() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Prepare configuration for a DHCP discovered Intergas Gateway device.""" + self._discovered_host = discovery_info.ip + # In case we have an existing entry with the same host + # we update the entry with the unique_id for the gateway, and abort the flow + unique_id = format_mac(discovery_info.macaddress) + existing_entries_without_unique_id = [ + entry + for entry in self._async_current_entries(include_ignore=False) + if entry.unique_id is None + and entry.data.get(CONF_HOST) == self._discovered_host + and entry.state is ConfigEntryState.LOADED + ] + if existing_entries_without_unique_id: + self.hass.config_entries.async_update_entry( + existing_entries_without_unique_id[0], unique_id=unique_id + ) + 
self.hass.config_entries.async_schedule_reload( + existing_entries_without_unique_id[0].entry_id + ) + raise AbortFlow("already_configured") + + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured(updates={CONF_HOST: self._discovered_host}) + + return await self.async_step_dhcp_confirm() + + async def async_step_dhcp_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm setup from discovery.""" + if user_input is not None: + return await self.async_step_dhcp_auth({CONF_HOST: self._discovered_host}) + return self.async_show_form( + step_id="dhcp_confirm", + description_placeholders={CONF_HOST: self._discovered_host}, + ) + + async def async_step_dhcp_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial set up via DHCP.""" + errors: dict[str, str] | None = None + data_schema: vol.Schema = DHCP_CONFIG_SCHEMA + if user_input is not None: + user_input[CONF_HOST] = self._discovered_host + if ( + errors := await async_try_connect_gateway(self.hass, user_input) + ) is None: + return self.async_create_entry(title=TITLE, data=user_input) + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + + return self.async_show_form( + step_id="dhcp_auth", + data_schema=data_schema, + errors=errors, + description_placeholders={CONF_HOST: self._discovered_host}, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/incomfort/coordinator.py b/homeassistant/components/incomfort/coordinator.py index 20cc8e7cc69..3436d40298a 100644 --- a/homeassistant/components/incomfort/coordinator.py +++ b/homeassistant/components/incomfort/coordinator.py @@ -9,7 +9,7 @@ from aiohttp import ClientResponseError from incomfortclient import ( Gateway as InComfortGateway, Heater as InComfortHeater, - IncomfortError, + InvalidHeaterList, ) from homeassistant.const import CONF_HOST @@ -50,8 +50,11 @@ async def async_connect_gateway( class InComfortDataCoordinator(DataUpdateCoordinator[InComfortData]): """Data coordinator for InComfort entities.""" - def __init__(self, hass: HomeAssistant, incomfort_data: InComfortData) -> None: + def __init__( + self, hass: HomeAssistant, incomfort_data: InComfortData, unique_id: str | None + ) -> None: """Initialize coordinator.""" + self.unique_id = unique_id super().__init__( hass, _LOGGER, @@ -67,9 +70,10 @@ class InComfortDataCoordinator(DataUpdateCoordinator[InComfortData]): await heater.update() except TimeoutError as exc: raise UpdateFailed("Timeout error") from exc - except IncomfortError as exc: - if isinstance(exc.message, ClientResponseError): - if exc.message.status == 401: - raise ConfigEntryError("Incorrect credentials") from exc + except ClientResponseError as exc: + if exc.status == 401: + raise ConfigEntryError("Incorrect credentials") from exc + raise UpdateFailed(exc.message) from exc + except InvalidHeaterList as exc: raise UpdateFailed(exc.message) from exc return self.incomfort_data diff --git a/homeassistant/components/incomfort/entity.py b/homeassistant/components/incomfort/entity.py index dd662b411dd..1924c91376b 100644 --- a/homeassistant/components/incomfort/entity.py +++ b/homeassistant/components/incomfort/entity.py @@ -28,3 +28,5 @@ class IncomfortBoilerEntity(IncomfortEntity): name="Boiler", serial_number=heater.serial_no, ) + if coordinator.unique_id: + self._attr_device_info["via_device"] = (DOMAIN, coordinator.unique_id) diff --git 
a/homeassistant/components/incomfort/errors.py b/homeassistant/components/incomfort/errors.py index 93a29d05bb8..c367916d6c7 100644 --- a/homeassistant/components/incomfort/errors.py +++ b/homeassistant/components/incomfort/errors.py @@ -1,32 +1,33 @@ """Exceptions raised by Intergas InComfort integration.""" -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from .const import DOMAIN + class NotFound(HomeAssistantError): """Raise exception if no Lan2RF Gateway was found.""" - translation_domain = HOMEASSISTANT_DOMAIN + translation_domain = DOMAIN translation_key = "not_found" class NoHeaters(ConfigEntryNotReady): """Raise exception if no heaters are found.""" - translation_domain = HOMEASSISTANT_DOMAIN + translation_domain = DOMAIN translation_key = "no_heaters" -class InConfortTimeout(ConfigEntryNotReady): +class InComfortTimeout(ConfigEntryNotReady): """Raise exception if no heaters are found.""" - translation_domain = HOMEASSISTANT_DOMAIN + translation_domain = DOMAIN translation_key = "timeout_error" -class InConfortUnknownError(ConfigEntryNotReady): +class InComfortUnknownError(ConfigEntryNotReady): """Raise exception if no heaters are found.""" - translation_domain = HOMEASSISTANT_DOMAIN + translation_domain = DOMAIN translation_key = "unknown" diff --git a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index f404f33b970..f4d752bfa48 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -3,8 +3,12 @@ "name": "Intergas InComfort/Intouch Lan2RF gateway", "codeowners": ["@jbouwh"], "config_flow": true, + "dhcp": [ + { "hostname": "rfgateway", "macaddress": "0004A3*" }, + { "registered_devices": true } + ], "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], - "requirements": ["incomfort-client==0.6.4"] + "requirements": ["incomfort-client==0.6.7"] } diff --git a/homeassistant/components/incomfort/sensor.py b/homeassistant/components/incomfort/sensor.py index e9697a0036f..e3f3fc785b2 100644 --- a/homeassistant/components/incomfort/sensor.py +++ b/homeassistant/components/incomfort/sensor.py @@ -99,7 +99,7 @@ class IncomfortSensor(IncomfortBoilerEntity, SensorEntity): @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self._heater.status[self.entity_description.value_key] + return self._heater.status[self.entity_description.value_key] # type: ignore [no-any-return] @property def extra_state_attributes(self) -> dict[str, Any] | None: diff --git a/homeassistant/components/incomfort/strings.json b/homeassistant/components/incomfort/strings.json index 8bcfa4ce5e1..4c47d4c57ad 100644 --- a/homeassistant/components/incomfort/strings.json +++ b/homeassistant/components/incomfort/strings.json @@ -14,6 +14,22 @@ "password": "The password to log into the gateway, is printed at the bottom of the Lan2RF Gateway or is `intergas` for some older devices." } }, + "dhcp_auth": { + "title": "Set up Intergas InComfort Lan2RF Gateway", + "description": "Please enter authentication details for gateway {host}", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The username to log into the gateway. 
This is `admin` in most cases.", + "password": "The password to log into the gateway, is printed at the bottom of the Lan2RF Gateway or is `intergas` for some older devices." + } + }, + "dhcp_confirm": { + "title": "Set up Intergas InComfort Lan2RF Gateway", + "description": "Do you want to set up the discovered Intergas InComfort Lan2RF Gateway ({host})?" + }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" @@ -33,8 +49,22 @@ "auth_error": "Invalid credentials.", "no_heaters": "No heaters found.", "not_found": "No Lan2RF gateway found.", - "timeout_error": "Time out when connection to Lan2RF gateway.", - "unknown": "Unknown error when connection to Lan2RF gateway." + "timeout_error": "Time out when connecting to Lan2RF gateway.", + "unknown": "Unknown error when connecting to Lan2RF gateway." + } + }, + "exceptions": { + "no_heaters": { + "message": "[%key:component::incomfort::config::error::no_heaters%]" + }, + "not_found": { + "message": "[%key:component::incomfort::config::error::not_found%]" + }, + "timeout_error": { + "message": "[%key:component::incomfort::config::error::timeout_error%]" + }, + "unknown": { + "message": "[%key:component::incomfort::config::error::unknown%]" } }, "options": { diff --git a/homeassistant/components/isy994/strings.json b/homeassistant/components/isy994/strings.json index f0e55881652..86a1f14ff91 100644 --- a/homeassistant/components/isy994/strings.json +++ b/homeassistant/components/isy994/strings.json @@ -37,7 +37,7 @@ "step": { "init": { "title": "ISY Options", - "description": "Set the options for the ISY Integration: \n • Node Sensor String: Any device or folder that contains 'Node Sensor String' in the name will be treated as a sensor or binary sensor. \n • Ignore String: Any device with 'Ignore String' in the name will be ignored. \n • Variable Sensor String: Any variable that contains 'Variable Sensor String' will be added as a sensor. \n • Restore Light Brightness: If enabled, the previous brightness will be restored when turning on a light instead of the device's built-in On-Level.", + "description": "Set the options for the ISY integration: \n • Node Sensor String: Any device or folder that contains 'Node Sensor String' in the name will be treated as a sensor or binary sensor. \n • Ignore String: Any device with 'Ignore String' in the name will be ignored. \n • Variable Sensor String: Any variable that contains 'Variable Sensor String' will be added as a sensor. \n • Restore Light Brightness: If enabled, the previous brightness will be restored when turning on a light instead of the device's built-in On-Level.", "data": { "sensor_string": "Node Sensor String", "ignore_string": "Ignore String", @@ -62,7 +62,7 @@ "fields": { "command": { "name": "Command", - "description": "The ISY REST Command to be sent to the device." + "description": "The ISY REST command to be sent to the device." }, "value": { "name": "Value", @@ -74,13 +74,13 @@ }, "unit_of_measurement": { "name": "Unit of measurement", - "description": "The ISY Unit of Measurement (UOM) to send with the command, if required." + "description": "The ISY unit of measurement (UOM) to send with the command, if required." } } }, "send_node_command": { "name": "Send node command", - "description": "Sends a command to an ISY Device using its Home Assistant entity ID. 
Valid commands are: beep, brighten, dim, disable, enable, fade_down, fade_stop, fade_up, fast_off, fast_on, and query.", + "description": "Sends a command to an ISY device using its Home Assistant entity ID. Valid commands are: beep, brighten, dim, disable, enable, fade_down, fade_stop, fade_up, fast_off, fast_on, and query.", "fields": { "command": { "name": "Command", @@ -90,7 +90,7 @@ }, "get_zwave_parameter": { "name": "Get Z-Wave Parameter", - "description": "Requests a Z-Wave Device parameter via the ISY. The parameter value will be returned as a entity extra state attribute with the name \"ZW_#\" where \"#\" is the parameter number.", + "description": "Requests a Z-Wave device parameter via the ISY. The parameter value will be returned as an entity extra state attribute with the name \"ZW_#\" where \"#\" is the parameter number.", "fields": { "parameter": { "name": "Parameter", @@ -99,8 +99,8 @@ } }, "set_zwave_parameter": { - "name": "Set Z-Wave Parameter", - "description": "Updates a Z-Wave Device parameter via the ISY. The parameter value will also be returned as a entity extra state attribute with the name \"ZW_#\" where \"#\" is the parameter number.", + "name": "Set Z-Wave parameter", + "description": "Updates a Z-Wave device parameter via the ISY. The parameter value will also be returned as an entity extra state attribute with the name \"ZW_#\" where \"#\" is the parameter number.", "fields": { "parameter": { "name": "[%key:component::isy994::services::get_zwave_parameter::fields::parameter::name%]", @@ -117,8 +117,8 @@ } }, "set_zwave_lock_user_code": { - "name": "Set Z-Wave Lock User Code", - "description": "Sets a Z-Wave Lock User Code via the ISY.", + "name": "Set Z-Wave lock user code", + "description": "Sets a user code for a Z-Wave lock via the ISY.", "fields": { "user_num": { "name": "User Number", @@ -131,8 +131,8 @@ } }, "delete_zwave_lock_user_code": { - "name": "Delete Z-Wave Lock User Code", - "description": "Delete a Z-Wave Lock User Code via the ISY.", + "name": "Delete Z-Wave lock user code", + "description": "Deletes a user code for a Z-Wave lock via the ISY.", "fields": { "user_num": { "name": "[%key:component::isy994::services::set_zwave_lock_user_code::fields::user_num::name%]", @@ -141,8 +141,8 @@ } }, "rename_node": { - "name": "Rename Node on ISY", - "description": "Renames a node or group (scene) on the ISY. Note: this will not automatically change the Home Assistant Entity Name or Entity ID to match. The entity name and ID will only be updated after reloading the integration or restarting Home Assistant, and ONLY IF you have not already customized the name within Home Assistant.", + "name": "Rename node on ISY", + "description": "Renames a node or group (scene) on the ISY. Note: this will not automatically change the Home Assistant entity name or entity ID to match.
The entity name and ID will only be updated after reloading the integration or restarting Home Assistant, and ONLY IF you have not already customized the name within Home Assistant.", "fields": { "name": { "name": "New Name", diff --git a/homeassistant/components/led_ble/config_flow.py b/homeassistant/components/led_ble/config_flow.py index 90d86d44160..517fb3759de 100644 --- a/homeassistant/components/led_ble/config_flow.py +++ b/homeassistant/components/led_ble/config_flow.py @@ -6,7 +6,7 @@ import logging from typing import Any from bluetooth_data_tools import human_readable_name -from led_ble import BLEAK_EXCEPTIONS, LEDBLE +from led_ble import BLEAK_EXCEPTIONS, LEDBLE, CharacteristicMissingError import voluptuous as vol from homeassistant.components.bluetooth import ( @@ -66,6 +66,8 @@ class LedBleConfigFlow(ConfigFlow, domain=DOMAIN): led_ble = LEDBLE(discovery_info.device) try: await led_ble.update() + except CharacteristicMissingError: + return self.async_abort(reason="not_supported") except BLEAK_EXCEPTIONS: errors["base"] = "cannot_connect" except Exception: diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 24e986000bb..7b07653e2db 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -26,9 +26,6 @@ { "local_name": "AP-*" }, - { - "local_name": "MELK-*" - }, { "local_name": "LD-0003" } @@ -38,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.22.0", "led-ble==1.1.1"] + "requirements": ["bluetooth-data-tools==1.22.0", "led-ble==1.1.4"] } diff --git a/homeassistant/components/linkplay/entity.py b/homeassistant/components/linkplay/entity.py index 00e2f39b233..74e067f5eb3 100644 --- a/homeassistant/components/linkplay/entity.py +++ b/homeassistant/components/linkplay/entity.py @@ -44,9 +44,15 @@ class LinkPlayBaseEntity(Entity): if model != MANUFACTURER_GENERIC: model_id = bridge.device.properties["project"] + connections: set[tuple[str, str]] = set() + if "MAC" in bridge.device.properties: + connections.add( + (dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"]) + ) + self._attr_device_info = dr.DeviceInfo( configuration_url=bridge.endpoint, - connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, + connections=connections, hw_version=bridge.device.properties["hardware"], identifiers={(DOMAIN, bridge.device.uuid)}, manufacturer=manufacturer, diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index cc124ceb611..ec9a8759a30 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.1.1"], + "requirements": ["python-linkplay==0.1.3"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/litterrobot/__init__.py b/homeassistant/components/litterrobot/__init__.py index 3c55c4c4035..76274f987cd 100644 --- a/homeassistant/components/litterrobot/__init__.py +++ b/homeassistant/components/litterrobot/__init__.py @@ -4,15 +4,12 @@ from __future__ import annotations from pylitterbot import FeederRobot, LitterRobot, LitterRobot3, LitterRobot4, Robot -from homeassistant.config_entries import ConfigEntry from homeassistant.const 
import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry from .const import DOMAIN -from .hub import LitterRobotHub - -type LitterRobotConfigEntry = ConfigEntry[LitterRobotHub] +from .coordinator import LitterRobotConfigEntry, LitterRobotDataUpdateCoordinator PLATFORMS_BY_TYPE = { Robot: ( @@ -41,11 +38,11 @@ def get_platforms_for_robots(robots: list[Robot]) -> set[Platform]: async def async_setup_entry(hass: HomeAssistant, entry: LitterRobotConfigEntry) -> bool: """Set up Litter-Robot from a config entry.""" - hub = LitterRobotHub(hass, entry.data) - await hub.login(load_robots=True, subscribe_for_updates=True) - entry.runtime_data = hub + coordinator = LitterRobotDataUpdateCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator - if platforms := get_platforms_for_robots(hub.account.robots): + if platforms := get_platforms_for_robots(coordinator.account.robots): await hass.config_entries.async_forward_entry_setups(entry, platforms) return True diff --git a/homeassistant/components/litterrobot/binary_sensor.py b/homeassistant/components/litterrobot/binary_sensor.py index 91113d6c094..e6cf23fa27c 100644 --- a/homeassistant/components/litterrobot/binary_sensor.py +++ b/homeassistant/components/litterrobot/binary_sensor.py @@ -17,33 +17,17 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity, _RobotT -@dataclass(frozen=True) -class RequiredKeysMixin(Generic[_RobotT]): - """A class that describes robot binary sensor entity required keys.""" - - is_on_fn: Callable[[_RobotT], bool] - - -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class RobotBinarySensorEntityDescription( - BinarySensorEntityDescription, RequiredKeysMixin[_RobotT] + BinarySensorEntityDescription, Generic[_RobotT] ): """A class that describes robot binary sensor entities.""" - -class LitterRobotBinarySensorEntity(LitterRobotEntity[_RobotT], BinarySensorEntity): - """Litter-Robot binary sensor entity.""" - - entity_description: RobotBinarySensorEntityDescription[_RobotT] - - @property - def is_on(self) -> bool: - """Return the state.""" - return self.entity_description.is_on_fn(self.robot) + is_on_fn: Callable[[_RobotT], bool] BINARY_SENSOR_MAP: dict[type[Robot], tuple[RobotBinarySensorEntityDescription, ...]] = { @@ -82,11 +66,24 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot binary sensors using config entry.""" - hub = entry.runtime_data + coordinator = entry.runtime_data async_add_entities( - LitterRobotBinarySensorEntity(robot=robot, hub=hub, description=description) - for robot in hub.account.robots + LitterRobotBinarySensorEntity( + robot=robot, coordinator=coordinator, description=description + ) + for robot in coordinator.account.robots for robot_type, entity_descriptions in BINARY_SENSOR_MAP.items() if isinstance(robot, robot_type) for description in entity_descriptions ) + + +class LitterRobotBinarySensorEntity(LitterRobotEntity[_RobotT], BinarySensorEntity): + """Litter-Robot binary sensor entity.""" + + entity_description: RobotBinarySensorEntityDescription[_RobotT] + + @property + def is_on(self) -> bool: + """Return the state.""" + return self.entity_description.is_on_fn(self.robot) 
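Aside on the Litter-Robot entity description changes above (repeated in the button, select, sensor, switch, and time platforms that follow): the separate RequiredKeysMixin classes are dropped in favour of @dataclass(frozen=True, kw_only=True) descriptions. The sketch below is an illustrative, self-contained example of that pattern only; EntityDescription, FakeRobot, and SLEEPING_SENSOR here are stand-ins and not code from this PR. It shows why kw_only=True lets a subclass declare a required field such as is_on_fn after inherited fields that already have defaults, which is what previously forced the mixin workaround.

# Illustrative sketch only (not part of this diff): the kw_only entity-description
# pattern used by the refactor above, with stand-in classes.
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Generic, TypeVar

_RobotT = TypeVar("_RobotT")


@dataclass(frozen=True)
class EntityDescription:
    # Stand-in for a base description mixing required and defaulted fields.
    # A plain subclass could not add another required field after these without
    # a "non-default argument follows default argument" error.
    key: str
    translation_key: str | None = None


@dataclass(frozen=True, kw_only=True)
class RobotBinarySensorEntityDescription(EntityDescription, Generic[_RobotT]):
    # kw_only=True (Python 3.10+) makes is_on_fn keyword-only, so it can stay
    # required even though the inherited translation_key has a default; the old
    # pattern needed a separate RequiredKeysMixin so the required field sorted
    # ahead of the defaulted ones in the generated __init__.
    is_on_fn: Callable[[_RobotT], bool]


class FakeRobot:
    # Minimal stand-in for a pylitterbot robot object.
    sleeping = True


SLEEPING_SENSOR = RobotBinarySensorEntityDescription[FakeRobot](
    key="sleeping",
    is_on_fn=lambda robot: robot.sleeping,
)

if __name__ == "__main__":
    print(SLEEPING_SENSOR.is_on_fn(FakeRobot()))  # -> True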
diff --git a/homeassistant/components/litterrobot/button.py b/homeassistant/components/litterrobot/button.py index 6e6cc563c8e..01888e7fbae 100644 --- a/homeassistant/components/litterrobot/button.py +++ b/homeassistant/components/litterrobot/button.py @@ -4,70 +4,62 @@ from __future__ import annotations from collections.abc import Callable, Coroutine from dataclasses import dataclass -import itertools from typing import Any, Generic -from pylitterbot import FeederRobot, LitterRobot3 +from pylitterbot import FeederRobot, LitterRobot3, LitterRobot4, Robot from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity, _RobotT +@dataclass(frozen=True, kw_only=True) +class RobotButtonEntityDescription(ButtonEntityDescription, Generic[_RobotT]): + """A class that describes robot button entities.""" + + press_fn: Callable[[_RobotT], Coroutine[Any, Any, bool]] + + +ROBOT_BUTTON_MAP: dict[type[Robot], RobotButtonEntityDescription] = { + LitterRobot3: RobotButtonEntityDescription[LitterRobot3]( + key="reset_waste_drawer", + translation_key="reset_waste_drawer", + entity_category=EntityCategory.CONFIG, + press_fn=lambda robot: robot.reset_waste_drawer(), + ), + LitterRobot4: RobotButtonEntityDescription[LitterRobot4]( + key="reset", + translation_key="reset", + entity_category=EntityCategory.CONFIG, + press_fn=lambda robot: robot.reset(), + ), + FeederRobot: RobotButtonEntityDescription[FeederRobot]( + key="give_snack", + translation_key="give_snack", + press_fn=lambda robot: robot.give_snack(), + ), +} + + async def async_setup_entry( hass: HomeAssistant, entry: LitterRobotConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot cleaner using config entry.""" - hub = entry.runtime_data - entities: list[LitterRobotButtonEntity] = list( - itertools.chain( - ( - LitterRobotButtonEntity( - robot=robot, hub=hub, description=LITTER_ROBOT_BUTTON - ) - for robot in hub.litter_robots() - if isinstance(robot, LitterRobot3) - ), - ( - LitterRobotButtonEntity( - robot=robot, hub=hub, description=FEEDER_ROBOT_BUTTON - ) - for robot in hub.feeder_robots() - ), + coordinator = entry.runtime_data + async_add_entities( + LitterRobotButtonEntity( + robot=robot, coordinator=coordinator, description=description ) + for robot in coordinator.account.robots + for robot_type, description in ROBOT_BUTTON_MAP.items() + if isinstance(robot, robot_type) ) - async_add_entities(entities) - - -@dataclass(frozen=True) -class RequiredKeysMixin(Generic[_RobotT]): - """A class that describes robot button entity required keys.""" - - press_fn: Callable[[_RobotT], Coroutine[Any, Any, bool]] - - -@dataclass(frozen=True) -class RobotButtonEntityDescription(ButtonEntityDescription, RequiredKeysMixin[_RobotT]): - """A class that describes robot button entities.""" - - -LITTER_ROBOT_BUTTON = RobotButtonEntityDescription[LitterRobot3]( - key="reset_waste_drawer", - translation_key="reset_waste_drawer", - entity_category=EntityCategory.CONFIG, - press_fn=lambda robot: robot.reset_waste_drawer(), -) -FEEDER_ROBOT_BUTTON = RobotButtonEntityDescription[FeederRobot]( - key="give_snack", - translation_key="give_snack", - press_fn=lambda robot: robot.give_snack(), -) class 
LitterRobotButtonEntity(LitterRobotEntity[_RobotT], ButtonEntity): @@ -78,4 +70,4 @@ class LitterRobotButtonEntity(LitterRobotEntity[_RobotT], ButtonEntity): async def async_press(self) -> None: """Press the button.""" await self.entity_description.press_fn(self.robot) - self.coordinator.async_set_updated_data(True) + self.coordinator.async_set_updated_data(None) diff --git a/homeassistant/components/litterrobot/hub.py b/homeassistant/components/litterrobot/coordinator.py similarity index 51% rename from homeassistant/components/litterrobot/hub.py rename to homeassistant/components/litterrobot/coordinator.py index 77050855c70..a56a6607d32 100644 --- a/homeassistant/components/litterrobot/hub.py +++ b/homeassistant/components/litterrobot/coordinator.py @@ -1,64 +1,66 @@ -"""A wrapper 'hub' for the Litter-Robot API.""" +"""The Litter-Robot coordinator.""" from __future__ import annotations -from collections.abc import Generator, Mapping +from collections.abc import Generator from datetime import timedelta import logging -from typing import Any from pylitterbot import Account, FeederRobot, LitterRobot from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginException +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -UPDATE_INTERVAL_SECONDS = 60 * 5 +UPDATE_INTERVAL = timedelta(minutes=5) + +type LitterRobotConfigEntry = ConfigEntry[LitterRobotDataUpdateCoordinator] -class LitterRobotHub: - """A Litter-Robot hub wrapper class.""" +class LitterRobotDataUpdateCoordinator(DataUpdateCoordinator[None]): + """The Litter-Robot data update coordinator.""" - def __init__(self, hass: HomeAssistant, data: Mapping[str, Any]) -> None: - """Initialize the Litter-Robot hub.""" - self._data = data - self.account = Account(websession=async_get_clientsession(hass)) + config_entry: LitterRobotConfigEntry - async def _async_update_data() -> bool: - """Update all device states from the Litter-Robot API.""" - await self.account.refresh_robots() - return True - - self.coordinator = DataUpdateCoordinator( + def __init__( + self, hass: HomeAssistant, config_entry: LitterRobotConfigEntry + ) -> None: + """Initialize the Litter-Robot data update coordinator.""" + super().__init__( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, - update_method=_async_update_data, - update_interval=timedelta(seconds=UPDATE_INTERVAL_SECONDS), + update_interval=UPDATE_INTERVAL, ) - async def login( - self, load_robots: bool = False, subscribe_for_updates: bool = False - ) -> None: - """Login to Litter-Robot.""" + self.account = Account(websession=async_get_clientsession(hass)) + + async def _async_update_data(self) -> None: + """Update all device states from the Litter-Robot API.""" + await self.account.refresh_robots() + + async def _async_setup(self) -> None: + """Set up the coordinator.""" try: await self.account.connect( - username=self._data[CONF_USERNAME], - password=self._data[CONF_PASSWORD], - load_robots=load_robots, - 
subscribe_for_updates=subscribe_for_updates, + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + load_robots=True, + subscribe_for_updates=True, ) except LitterRobotLoginException as ex: raise ConfigEntryAuthFailed("Invalid credentials") from ex except LitterRobotException as ex: - raise ConfigEntryNotReady("Unable to connect to Litter-Robot API") from ex + raise UpdateFailed("Unable to connect to Litter-Robot API") from ex def litter_robots(self) -> Generator[LitterRobot]: """Get Litter-Robots from the account.""" diff --git a/homeassistant/components/litterrobot/entity.py b/homeassistant/components/litterrobot/entity.py index 4639404b92b..36cbbb730ce 100644 --- a/homeassistant/components/litterrobot/entity.py +++ b/homeassistant/components/litterrobot/entity.py @@ -9,44 +9,39 @@ from pylitterbot.robot import EVENT_UPDATE from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .hub import LitterRobotHub +from .coordinator import LitterRobotDataUpdateCoordinator _RobotT = TypeVar("_RobotT", bound=Robot) class LitterRobotEntity( - CoordinatorEntity[DataUpdateCoordinator[bool]], Generic[_RobotT] + CoordinatorEntity[LitterRobotDataUpdateCoordinator], Generic[_RobotT] ): """Generic Litter-Robot entity representing common data and methods.""" _attr_has_entity_name = True def __init__( - self, robot: _RobotT, hub: LitterRobotHub, description: EntityDescription + self, + robot: _RobotT, + coordinator: LitterRobotDataUpdateCoordinator, + description: EntityDescription, ) -> None: """Pass coordinator to CoordinatorEntity.""" - super().__init__(hub.coordinator) + super().__init__(coordinator) self.robot = robot - self.hub = hub self.entity_description = description - self._attr_unique_id = f"{self.robot.serial}-{description.key}" - - @property - def device_info(self) -> DeviceInfo: - """Return the device information for a Litter-Robot.""" - assert self.robot.serial - return DeviceInfo( - identifiers={(DOMAIN, self.robot.serial)}, - manufacturer="Litter-Robot", - model=self.robot.model, - name=self.robot.name, - sw_version=getattr(self.robot, "firmware", None), + self._attr_unique_id = f"{robot.serial}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, robot.serial)}, + manufacturer="Whisker", + model=robot.model, + name=robot.name, + serial_number=robot.serial, + sw_version=getattr(robot, "firmware", None), ) async def async_added_to_hass(self) -> None: diff --git a/homeassistant/components/litterrobot/icons.json b/homeassistant/components/litterrobot/icons.json index 482031f8424..ba3df2114b7 100644 --- a/homeassistant/components/litterrobot/icons.json +++ b/homeassistant/components/litterrobot/icons.json @@ -17,6 +17,14 @@ } }, "select": { + "brightness_level": { + "default": "mdi:lightbulb-question", + "state": { + "low": "mdi:lightbulb-on-30", + "medium": "mdi:lightbulb-on-50", + "high": "mdi:lightbulb-on" + } + }, "cycle_delay": { "default": "mdi:timer-outline" }, diff --git a/homeassistant/components/litterrobot/quality_scale.yaml b/homeassistant/components/litterrobot/quality_scale.yaml new file mode 100644 index 00000000000..3eae5d3e668 --- /dev/null +++ b/homeassistant/components/litterrobot/quality_scale.yaml @@ -0,0 +1,113 @@ +rules: + # 
Adjust platform files for consistent flow: + # [entity description classes] + # [entity descriptions] + # [async_setup_entry] + # [entity classes] + # Remove RequiredKeysMixin and add kw_only to classes + # Wrap multiline lambdas in parentheses + # Extend entity description in switch.py to use value_fn instead of getattr + # Deprecate extra state attributes in vacuum.py + # Bronze + action-setup: + status: todo + comment: | + Action async_set_sleep_mode is currently set up in the vacuum platform + appropriate-polling: + status: done + comment: | + Primarily relies on push data, but polls every 5 minutes for missed updates + brands: done + common-modules: + status: todo + comment: | + hub.py should be renamed to coordinator.py and updated accordingly + Also should not need to return bool (never used) + config-flow-test-coverage: + status: todo + comment: | + Fix stale title and docstring + Make sure every test ends in either ABORT or CREATE_ENTRY + so we also test that the flow is able to recover + config-flow: done + dependency-transparency: done + docs-actions: + status: todo + comment: Can be finished after async_set_sleep_mode is moved to async_setup + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: todo + comment: Do we need to subscribe to both the coordinator and robot itself? + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: No options to configure + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: done + test-coverage: + status: todo + comment: | + Move big data objects from common.py into JSON fixtures and load them when needed. + Other fields can be moved to const.py.
Consider snapshots and testing data updates + + # Gold + devices: + status: done + comment: Currently uses the device_info property, could be moved to _attr_device_info + diagnostics: todo + discovery-update-info: + status: done + comment: The integration is cloud-based + discovery: + status: todo + comment: Need to validate discovery + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: todo + comment: Check if we should disable any entities by default + entity-translations: + status: todo + comment: Make sure all translated states are in sentence case + exception-translations: todo + icon-translations: + status: todo + comment: BRIGHTNESS_LEVEL_ICON_MAP should be migrated to icons.json + reconfiguration-flow: todo + repair-issues: + status: done + comment: | + This integration doesn't have any cases where raising an issue is needed + stale-devices: + status: todo + comment: | + Currently handled via async_remove_config_entry_device, + but we should be able to remove devices automatically + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/litterrobot/select.py b/homeassistant/components/litterrobot/select.py index 948fad45a76..1a3d2fc2fb4 100644 --- a/homeassistant/components/litterrobot/select.py +++ b/homeassistant/components/litterrobot/select.py @@ -14,37 +14,22 @@ from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry, LitterRobotDataUpdateCoordinator from .entity import LitterRobotEntity, _RobotT -from .hub import LitterRobotHub _CastTypeT = TypeVar("_CastTypeT", int, float, str) -BRIGHTNESS_LEVEL_ICON_MAP: dict[BrightnessLevel | None, str] = { - BrightnessLevel.LOW: "mdi:lightbulb-on-30", - BrightnessLevel.MEDIUM: "mdi:lightbulb-on-50", - BrightnessLevel.HIGH: "mdi:lightbulb-on", - None: "mdi:lightbulb-question", -} - -@dataclass(frozen=True) -class RequiredKeysMixin(Generic[_RobotT, _CastTypeT]): - """A class that describes robot select entity required keys.""" - - current_fn: Callable[[_RobotT], _CastTypeT | None] - options_fn: Callable[[_RobotT], list[_CastTypeT]] - select_fn: Callable[[_RobotT, str], Coroutine[Any, Any, bool]] - - -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class RobotSelectEntityDescription( - SelectEntityDescription, RequiredKeysMixin[_RobotT, _CastTypeT] + SelectEntityDescription, Generic[_RobotT, _CastTypeT] ): """A class that describes robot select entities.""" entity_category: EntityCategory = EntityCategory.CONFIG - icon_fn: Callable[[_RobotT], str] | None = None + current_fn: Callable[[_RobotT], _CastTypeT | None] + options_fn: Callable[[_RobotT], list[_CastTypeT]] + select_fn: Callable[[_RobotT, str], Coroutine[Any, Any, bool]] ROBOT_SELECT_MAP: dict[type[Robot], RobotSelectEntityDescription] = { @@ -59,14 +44,15 @@ ROBOT_SELECT_MAP: dict[type[Robot], RobotSelectEntityDescription] = { LitterRobot4: RobotSelectEntityDescription[LitterRobot4, str]( key="panel_brightness", translation_key="brightness_level", - current_fn=lambda robot: bri.name.lower() - if (bri := robot.panel_brightness) is not None - else None, - options_fn=lambda _: [level.name.lower() for level in BrightnessLevel], - select_fn=lambda robot, opt: robot.set_panel_brightness( - BrightnessLevel[opt.upper()] + current_fn=( + lambda robot: bri.name.lower() + if (bri := robot.panel_brightness) is not None + else None + ), + options_fn=lambda _: [level.name.lower() for level in BrightnessLevel], + select_fn=( + lambda robot, opt: robot.set_panel_brightness(BrightnessLevel[opt.upper()]) ), - icon_fn=lambda robot: BRIGHTNESS_LEVEL_ICON_MAP[robot.panel_brightness], ), FeederRobot: RobotSelectEntityDescription[FeederRobot, float]( key="meal_insert_size", @@ -85,14 +71,15 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot selects using config entry.""" - hub = entry.runtime_data - entities = [ - LitterRobotSelectEntity(robot=robot, hub=hub, description=description) - for robot in hub.account.robots + coordinator = entry.runtime_data + async_add_entities( + LitterRobotSelectEntity( + robot=robot, coordinator=coordinator, description=description + ) + for robot in coordinator.account.robots for robot_type, description in ROBOT_SELECT_MAP.items() if isinstance(robot, robot_type) - ] - async_add_entities(entities) + ) class LitterRobotSelectEntity( @@ -105,21 +92,14 @@ class LitterRobotSelectEntity( def __init__( self, robot: _RobotT, - hub: LitterRobotHub, + coordinator: LitterRobotDataUpdateCoordinator, description: RobotSelectEntityDescription[_RobotT, _CastTypeT], ) -> None: """Initialize a Litter-Robot select entity.""" - super().__init__(robot, hub, description) + super().__init__(robot, coordinator, description) options = self.entity_description.options_fn(self.robot) self._attr_options = list(map(str, options)) - @property - def icon(self) -> str | None: - 
"""Return the icon to use in the frontend, if any.""" - if icon_fn := self.entity_description.icon_fn: - return str(icon_fn(self.robot)) - return super().icon - @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" diff --git a/homeassistant/components/litterrobot/sensor.py b/homeassistant/components/litterrobot/sensor.py index c110b89c7da..6545d7c7ae7 100644 --- a/homeassistant/components/litterrobot/sensor.py +++ b/homeassistant/components/litterrobot/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime -from typing import Any, Generic, cast +from typing import Any, Generic from pylitterbot import FeederRobot, LitterRobot, LitterRobot4, Robot @@ -19,7 +19,7 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfMass from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity, _RobotT @@ -34,34 +34,12 @@ def icon_for_gauge_level(gauge_level: int | None = None, offset: int = 0) -> str return "mdi:gauge-low" -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class RobotSensorEntityDescription(SensorEntityDescription, Generic[_RobotT]): """A class that describes robot sensor entities.""" icon_fn: Callable[[Any], str | None] = lambda _: None - should_report: Callable[[_RobotT], bool] = lambda _: True - - -class LitterRobotSensorEntity(LitterRobotEntity[_RobotT], SensorEntity): - """Litter-Robot sensor entity.""" - - entity_description: RobotSensorEntityDescription[_RobotT] - - @property - def native_value(self) -> float | datetime | str | None: - """Return the state.""" - if self.entity_description.should_report(self.robot): - if isinstance(val := getattr(self.robot, self.entity_description.key), str): - return val.lower() - return cast(float | datetime | None, val) - return None - - @property - def icon(self) -> str | None: - """Return the icon to use in the frontend, if any.""" - if (icon := self.entity_description.icon_fn(self.state)) is not None: - return icon - return super().icon + value_fn: Callable[[_RobotT], float | datetime | str | None] ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { @@ -72,24 +50,34 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { native_unit_of_measurement=PERCENTAGE, icon_fn=lambda state: icon_for_gauge_level(state, 10), state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda robot: robot.waste_drawer_level, ), RobotSensorEntityDescription[LitterRobot]( key="sleep_mode_start_time", translation_key="sleep_mode_start_time", device_class=SensorDeviceClass.TIMESTAMP, - should_report=lambda robot: robot.sleep_mode_enabled, + value_fn=( + lambda robot: robot.sleep_mode_start_time + if robot.sleep_mode_enabled + else None + ), ), RobotSensorEntityDescription[LitterRobot]( key="sleep_mode_end_time", translation_key="sleep_mode_end_time", device_class=SensorDeviceClass.TIMESTAMP, - should_report=lambda robot: robot.sleep_mode_enabled, + value_fn=( + lambda robot: robot.sleep_mode_end_time + if robot.sleep_mode_enabled + else None + ), ), RobotSensorEntityDescription[LitterRobot]( key="last_seen", translation_key="last_seen", device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda robot: 
robot.last_seen, ), RobotSensorEntityDescription[LitterRobot]( key="status_code", @@ -123,6 +111,9 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { "sdf", "spf", ], + value_fn=( + lambda robot: status.lower() if (status := robot.status_code) else None + ), ), ], LitterRobot4: [ @@ -132,6 +123,7 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { native_unit_of_measurement=PERCENTAGE, icon_fn=lambda state: icon_for_gauge_level(state, 10), state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda robot: robot.litter_level, ), RobotSensorEntityDescription[LitterRobot4]( key="pet_weight", @@ -139,6 +131,7 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { native_unit_of_measurement=UnitOfMass.POUNDS, device_class=SensorDeviceClass.WEIGHT, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda robot: robot.pet_weight, ), ], FeederRobot: [ @@ -148,6 +141,7 @@ ROBOT_SENSOR_MAP: dict[type[Robot], list[RobotSensorEntityDescription]] = { native_unit_of_measurement=PERCENTAGE, icon_fn=lambda state: icon_for_gauge_level(state, 10), state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda robot: robot.food_level, ) ], } @@ -159,12 +153,31 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot sensors using config entry.""" - hub = entry.runtime_data - entities = [ - LitterRobotSensorEntity(robot=robot, hub=hub, description=description) - for robot in hub.account.robots + coordinator = entry.runtime_data + async_add_entities( + LitterRobotSensorEntity( + robot=robot, coordinator=coordinator, description=description + ) + for robot in coordinator.account.robots for robot_type, entity_descriptions in ROBOT_SENSOR_MAP.items() if isinstance(robot, robot_type) for description in entity_descriptions - ] - async_add_entities(entities) + ) + + +class LitterRobotSensorEntity(LitterRobotEntity[_RobotT], SensorEntity): + """Litter-Robot sensor entity.""" + + entity_description: RobotSensorEntityDescription[_RobotT] + + @property + def native_value(self) -> float | datetime | str | None: + """Return the state.""" + return self.entity_description.value_fn(self.robot) + + @property + def icon(self) -> str | None: + """Return the icon to use in the frontend, if any.""" + if (icon := self.entity_description.icon_fn(self.state)) is not None: + return icon + return super().icon diff --git a/homeassistant/components/litterrobot/strings.json b/homeassistant/components/litterrobot/strings.json index 7acfad69735..19b007de068 100644 --- a/homeassistant/components/litterrobot/strings.json +++ b/homeassistant/components/litterrobot/strings.json @@ -5,6 +5,10 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The email address of your Whisker account.", + "password": "The password of your Whisker account." 
} }, "reauth_confirm": { @@ -12,6 +16,9 @@ "title": "[%key:common::config_flow::title::reauth%]", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::litterrobot::config::step::user::data_description::password%]" } } }, @@ -38,6 +45,9 @@ } }, "button": { + "reset": { + "name": "Reset" + }, "reset_waste_drawer": { "name": "Reset waste drawer" }, diff --git a/homeassistant/components/litterrobot/switch.py b/homeassistant/components/litterrobot/switch.py index 133fd897cc6..7ded89d552b 100644 --- a/homeassistant/components/litterrobot/switch.py +++ b/homeassistant/components/litterrobot/switch.py @@ -13,22 +13,17 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity, _RobotT -@dataclass(frozen=True) -class RequiredKeysMixin(Generic[_RobotT]): - """A class that describes robot switch entity required keys.""" - - set_fn: Callable[[_RobotT, bool], Coroutine[Any, Any, bool]] - - -@dataclass(frozen=True) -class RobotSwitchEntityDescription(SwitchEntityDescription, RequiredKeysMixin[_RobotT]): +@dataclass(frozen=True, kw_only=True) +class RobotSwitchEntityDescription(SwitchEntityDescription, Generic[_RobotT]): """A class that describes robot switch entities.""" entity_category: EntityCategory = EntityCategory.CONFIG + set_fn: Callable[[_RobotT, bool], Coroutine[Any, Any, bool]] + value_fn: Callable[[_RobotT], bool] ROBOT_SWITCHES = [ @@ -36,15 +31,32 @@ ROBOT_SWITCHES = [ key="night_light_mode_enabled", translation_key="night_light_mode", set_fn=lambda robot, value: robot.set_night_light(value), + value_fn=lambda robot: robot.night_light_mode_enabled, ), RobotSwitchEntityDescription[LitterRobot | FeederRobot]( key="panel_lock_enabled", translation_key="panel_lockout", set_fn=lambda robot, value: robot.set_panel_lockout(value), + value_fn=lambda robot: robot.panel_lock_enabled, ), ] +async def async_setup_entry( + hass: HomeAssistant, + entry: LitterRobotConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Litter-Robot switches using config entry.""" + coordinator = entry.runtime_data + async_add_entities( + RobotSwitchEntity(robot=robot, coordinator=coordinator, description=description) + for description in ROBOT_SWITCHES + for robot in coordinator.account.robots + if isinstance(robot, (LitterRobot, FeederRobot)) + ) + + class RobotSwitchEntity(LitterRobotEntity[_RobotT], SwitchEntity): """Litter-Robot switch entity.""" @@ -53,7 +65,7 @@ class RobotSwitchEntity(LitterRobotEntity[_RobotT], SwitchEntity): @property def is_on(self) -> bool | None: """Return true if switch is on.""" - return bool(getattr(self.robot, self.entity_description.key)) + return self.entity_description.value_fn(self.robot) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" @@ -62,19 +74,3 @@ class RobotSwitchEntity(LitterRobotEntity[_RobotT], SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.entity_description.set_fn(self.robot, False) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: LitterRobotConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Litter-Robot switches using config entry.""" - hub = entry.runtime_data - entities = [ - RobotSwitchEntity(robot=robot, 
hub=hub, description=description) - for description in ROBOT_SWITCHES - for robot in hub.account.robots - if isinstance(robot, (LitterRobot, FeederRobot)) - ] - async_add_entities(entities) diff --git a/homeassistant/components/litterrobot/time.py b/homeassistant/components/litterrobot/time.py index ace30d9f3a9..6e3743059b3 100644 --- a/homeassistant/components/litterrobot/time.py +++ b/homeassistant/components/litterrobot/time.py @@ -15,23 +15,18 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity, _RobotT -@dataclass(frozen=True) -class RequiredKeysMixin(Generic[_RobotT]): - """A class that describes robot time entity required keys.""" +@dataclass(frozen=True, kw_only=True) +class RobotTimeEntityDescription(TimeEntityDescription, Generic[_RobotT]): + """A class that describes robot time entities.""" value_fn: Callable[[_RobotT], time | None] set_fn: Callable[[_RobotT, time], Coroutine[Any, Any, bool]] -@dataclass(frozen=True) -class RobotTimeEntityDescription(TimeEntityDescription, RequiredKeysMixin[_RobotT]): - """A class that describes robot time entities.""" - - def _as_local_time(start: datetime | None) -> time | None: """Return a datetime as local time.""" return dt_util.as_local(start).time() if start else None @@ -42,8 +37,11 @@ LITTER_ROBOT_3_SLEEP_START = RobotTimeEntityDescription[LitterRobot3]( translation_key="sleep_mode_start_time", entity_category=EntityCategory.CONFIG, value_fn=lambda robot: _as_local_time(robot.sleep_mode_start_time), - set_fn=lambda robot, value: robot.set_sleep_mode( - robot.sleep_mode_enabled, value.replace(tzinfo=dt_util.get_default_time_zone()) + set_fn=( + lambda robot, value: robot.set_sleep_mode( + robot.sleep_mode_enabled, + value.replace(tzinfo=dt_util.get_default_time_zone()), + ) ), ) @@ -54,15 +52,15 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot cleaner using config entry.""" - hub = entry.runtime_data + coordinator = entry.runtime_data async_add_entities( - [ - LitterRobotTimeEntity( - robot=robot, hub=hub, description=LITTER_ROBOT_3_SLEEP_START - ) - for robot in hub.litter_robots() - if isinstance(robot, LitterRobot3) - ] + LitterRobotTimeEntity( + robot=robot, + coordinator=coordinator, + description=LITTER_ROBOT_3_SLEEP_START, + ) + for robot in coordinator.litter_robots() + if isinstance(robot, LitterRobot3) ) diff --git a/homeassistant/components/litterrobot/update.py b/homeassistant/components/litterrobot/update.py index 1d3e1dff57c..53ab23e9db8 100644 --- a/homeassistant/components/litterrobot/update.py +++ b/homeassistant/components/litterrobot/update.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity SCAN_INTERVAL = timedelta(days=1) @@ -34,12 +34,14 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot update platform.""" - hub = entry.runtime_data - entities = [ - RobotUpdateEntity(robot=robot, hub=hub, description=FIRMWARE_UPDATE_ENTITY) - for robot in hub.litter_robots() + coordinator = entry.runtime_data + entities = ( + RobotUpdateEntity( + robot=robot, coordinator=coordinator, description=FIRMWARE_UPDATE_ENTITY + ) + for robot in coordinator.litter_robots() if isinstance(robot, LitterRobot4) - ] + ) async_add_entities(entities, True) diff --git a/homeassistant/components/litterrobot/vacuum.py b/homeassistant/components/litterrobot/vacuum.py index bd00c328233..2f9e2e9b24d 100644 --- a/homeassistant/components/litterrobot/vacuum.py +++ b/homeassistant/components/litterrobot/vacuum.py @@ -20,7 +20,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import LitterRobotConfigEntry +from .coordinator import LitterRobotConfigEntry from .entity import LitterRobotEntity SERVICE_SET_SLEEP_MODE = "set_sleep_mode" @@ -49,12 +49,13 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Litter-Robot cleaner using config entry.""" - hub = entry.runtime_data - entities = [ - LitterRobotCleaner(robot=robot, hub=hub, description=LITTER_BOX_ENTITY) - for robot in hub.litter_robots() - ] - async_add_entities(entities) + coordinator = entry.runtime_data + async_add_entities( + LitterRobotCleaner( + robot=robot, coordinator=coordinator, description=LITTER_BOX_ENTITY + ) + for robot in coordinator.litter_robots() + ) platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( @@ -79,13 +80,6 @@ class LitterRobotCleaner(LitterRobotEntity[LitterRobot], StateVacuumEntity): """Return the state of the cleaner.""" return LITTER_BOX_STATUS_STATE_MAP.get(self.robot.status, VacuumActivity.ERROR) - @property - def status(self) -> str: - """Return the status of the cleaner.""" - return ( - f"{self.robot.status.text}{' (Sleeping)' if self.robot.is_sleeping else ''}" - ) - async def async_start(self) -> None: """Start a clean cycle.""" await self.robot.set_power_status(True) @@ -121,13 +115,3 @@ class LitterRobotCleaner(LitterRobotEntity[LitterRobot], StateVacuumEntity): ) .timetz() ) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device specific state attributes.""" - return { - "is_sleeping": self.robot.is_sleeping, - "sleep_mode_enabled": self.robot.sleep_mode_enabled, - "power_status": self.robot.power_status, - "status": self.status, - } diff --git a/homeassistant/components/lovelace/__init__.py b/homeassistant/components/lovelace/__init__.py index d26e4f1d2d7..9b1c86edb36 100644 --- a/homeassistant/components/lovelace/__init__.py +++ b/homeassistant/components/lovelace/__init__.py @@ -1,10 +1,12 @@ """Support for the Lovelace UI.""" +from dataclasses import dataclass import logging +from typing import Any import voluptuous as vol -from homeassistant.components import frontend, onboarding, websocket_api +from homeassistant.components import frontend, websocket_api from homeassistant.config import ( async_hass_config_yaml, async_process_component_and_handle_errors, @@ -14,9 +16,9 @@ from 
homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import collection, config_validation as cv from homeassistant.helpers.service import async_register_admin_service -from homeassistant.helpers.translation import async_get_translations from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_integration +from homeassistant.util import slugify from . import dashboard, resources, websocket from .const import ( # noqa: F401 @@ -30,6 +32,7 @@ from .const import ( # noqa: F401 DEFAULT_ICON, DOMAIN, EVENT_LOVELACE_UPDATED, + LOVELACE_DATA, MODE_STORAGE, MODE_YAML, RESOURCE_CREATE_FIELDS, @@ -39,12 +42,25 @@ from .const import ( # noqa: F401 SERVICE_RELOAD_RESOURCES, STORAGE_DASHBOARD_CREATE_FIELDS, STORAGE_DASHBOARD_UPDATE_FIELDS, - url_slug, ) from .system_health import system_health_info # noqa: F401 _LOGGER = logging.getLogger(__name__) + +def _validate_url_slug(value: Any) -> str: + """Validate value is a valid url slug.""" + if value is None: + raise vol.Invalid("Slug should not be None") + if "-" not in value: + raise vol.Invalid("Url path needs to contain a hyphen (-)") + str_value = str(value) + slg = slugify(str_value, separator="-") + if str_value == slg: + return str_value + raise vol.Invalid(f"invalid slug {value} (try {slg})") + + CONF_DASHBOARDS = "dashboards" YAML_DASHBOARD_SCHEMA = vol.Schema( @@ -64,7 +80,7 @@ CONFIG_SCHEMA = vol.Schema( ), vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys( YAML_DASHBOARD_SCHEMA, - slug_validator=url_slug, + slug_validator=_validate_url_slug, ), vol.Optional(CONF_RESOURCES): [RESOURCE_SCHEMA], } @@ -74,6 +90,16 @@ CONFIG_SCHEMA = vol.Schema( ) +@dataclass +class LovelaceData: + """Dataclass to store information in hass.data.""" + + mode: str + dashboards: dict[str | None, dashboard.LovelaceConfig] + resources: resources.ResourceYAMLCollection | resources.ResourceStorageCollection + yaml_dashboards: dict[str | None, ConfigType] + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Lovelace commands.""" mode = config[DOMAIN][CONF_MODE] @@ -101,9 +127,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: resource_collection = await create_yaml_resource_col( hass, config[DOMAIN].get(CONF_RESOURCES) ) - hass.data[DOMAIN]["resources"] = resource_collection + hass.data[LOVELACE_DATA].resources = resource_collection default_config: dashboard.LovelaceConfig + resource_collection: ( + resources.ResourceYAMLCollection | resources.ResourceStorageCollection + ) if mode == MODE_YAML: default_config = dashboard.LovelaceYAML(hass, None, None) resource_collection = await create_yaml_resource_col(hass, yaml_resources) @@ -152,28 +181,30 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass, websocket.websocket_lovelace_delete_config ) - hass.data[DOMAIN] = { + hass.data[LOVELACE_DATA] = LovelaceData( + mode=mode, # We store a dictionary mapping url_path: config. None is the default. 
- "mode": mode, - "dashboards": {None: default_config}, - "resources": resource_collection, - "yaml_dashboards": config[DOMAIN].get(CONF_DASHBOARDS, {}), - } + dashboards={None: default_config}, + resources=resource_collection, + yaml_dashboards=config[DOMAIN].get(CONF_DASHBOARDS, {}), + ) if hass.config.recovery_mode: return True - async def storage_dashboard_changed(change_type, item_id, item): + async def storage_dashboard_changed( + change_type: str, item_id: str, item: dict + ) -> None: """Handle a storage dashboard change.""" url_path = item[CONF_URL_PATH] if change_type == collection.CHANGE_REMOVED: frontend.async_remove_panel(hass, url_path) - await hass.data[DOMAIN]["dashboards"].pop(url_path).async_delete() + await hass.data[LOVELACE_DATA].dashboards.pop(url_path).async_delete() return if change_type == collection.CHANGE_ADDED: - existing = hass.data[DOMAIN]["dashboards"].get(url_path) + existing = hass.data[LOVELACE_DATA].dashboards.get(url_path) if existing: _LOGGER.warning( @@ -183,13 +214,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return - hass.data[DOMAIN]["dashboards"][url_path] = dashboard.LovelaceStorage( + hass.data[LOVELACE_DATA].dashboards[url_path] = dashboard.LovelaceStorage( hass, item ) update = False else: - hass.data[DOMAIN]["dashboards"][url_path].config = item + hass.data[LOVELACE_DATA].dashboards[url_path].config = item update = True try: @@ -198,10 +229,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: _LOGGER.warning("Failed to %s panel %s from storage", change_type, url_path) # Process YAML dashboards - for url_path, dashboard_conf in hass.data[DOMAIN]["yaml_dashboards"].items(): + for url_path, dashboard_conf in hass.data[LOVELACE_DATA].yaml_dashboards.items(): # For now always mode=yaml lovelace_config = dashboard.LovelaceYAML(hass, url_path, dashboard_conf) - hass.data[DOMAIN]["dashboards"][url_path] = lovelace_config + hass.data[LOVELACE_DATA].dashboards[url_path] = lovelace_config try: _register_panel(hass, url_path, MODE_YAML, dashboard_conf, False) @@ -211,9 +242,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # Process storage dashboards dashboards_collection = dashboard.DashboardsCollection(hass) - # This can be removed when the map integration is removed - hass.data[DOMAIN]["dashboards_collection"] = dashboards_collection - dashboards_collection.async_add_listener(storage_dashboard_changed) await dashboards_collection.async_load() @@ -225,16 +253,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: STORAGE_DASHBOARD_UPDATE_FIELDS, ).async_setup(hass) - def create_map_dashboard(): - hass.async_create_task(_create_map_dashboard(hass)) - - if not onboarding.async_is_onboarded(hass): - onboarding.async_add_listener(hass, create_map_dashboard) - return True -async def create_yaml_resource_col(hass, yaml_resources): +async def create_yaml_resource_col( + hass: HomeAssistant, yaml_resources: list[ConfigType] | None +) -> resources.ResourceYAMLCollection: """Create yaml resources collection.""" if yaml_resources is None: default_config = dashboard.LovelaceYAML(hass, None, None) @@ -254,7 +278,9 @@ async def create_yaml_resource_col(hass, yaml_resources): @callback -def _register_panel(hass, url_path, mode, config, update): +def _register_panel( + hass: HomeAssistant, url_path: str | None, mode: str, config: dict, update: bool +) -> None: """Register a panel.""" kwargs = { "frontend_url_path": url_path, @@ -268,25 +294,3 @@ def 
_register_panel(hass, url_path, mode, config, update): kwargs["sidebar_icon"] = config.get(CONF_ICON, DEFAULT_ICON) frontend.async_register_built_in_panel(hass, DOMAIN, **kwargs) - - -async def _create_map_dashboard(hass: HomeAssistant): - translations = await async_get_translations( - hass, hass.config.language, "dashboard", {onboarding.DOMAIN} - ) - title = translations["component.onboarding.dashboard.map.title"] - - dashboards_collection: dashboard.DashboardsCollection = hass.data[DOMAIN][ - "dashboards_collection" - ] - await dashboards_collection.async_create_item( - { - CONF_ALLOW_SINGLE_WORD: True, - CONF_ICON: "mdi:map", - CONF_TITLE: title, - CONF_URL_PATH: "map", - } - ) - - map_store: dashboard.LovelaceStorage = hass.data[DOMAIN]["dashboards"]["map"] - await map_store.async_save({"strategy": {"type": "map"}}) diff --git a/homeassistant/components/lovelace/cast.py b/homeassistant/components/lovelace/cast.py index c380a296fc0..635425ba3dc 100644 --- a/homeassistant/components/lovelace/cast.py +++ b/homeassistant/components/lovelace/cast.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + from pychromecast import Chromecast from pychromecast.const import CAST_TYPE_CHROMECAST @@ -23,8 +25,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.network import NoURLAvailableError, get_url -from .const import DOMAIN, ConfigNotFound -from .dashboard import LovelaceConfig +from .const import DOMAIN, LOVELACE_DATA, ConfigNotFound DEFAULT_DASHBOARD = "_default_" @@ -76,7 +77,7 @@ async def async_browse_media( can_expand=False, ) ] - for url_path in hass.data[DOMAIN]["dashboards"]: + for url_path in hass.data[LOVELACE_DATA].dashboards: if url_path is None: continue @@ -151,11 +152,13 @@ async def async_play_media( return True -async def _get_dashboard_info(hass, url_path): +async def _get_dashboard_info( + hass: HomeAssistant, url_path: str | None +) -> dict[str, Any]: """Load a dashboard and return info on views.""" if url_path == DEFAULT_DASHBOARD: url_path = None - dashboard: LovelaceConfig | None = hass.data[DOMAIN]["dashboards"].get(url_path) + dashboard = hass.data[LOVELACE_DATA].dashboards.get(url_path) if dashboard is None: raise ValueError("Invalid dashboard specified") @@ -172,7 +175,7 @@ async def _get_dashboard_info(hass, url_path): url_path = dashboard.url_path title = config.get("title", url_path) if config else url_path - views = [] + views: list[dict[str, Any]] = [] data = { "title": title, "url_path": url_path, diff --git a/homeassistant/components/lovelace/const.py b/homeassistant/components/lovelace/const.py index 86f47fe2b5c..0450c62338d 100644 --- a/homeassistant/components/lovelace/const.py +++ b/homeassistant/components/lovelace/const.py @@ -1,6 +1,8 @@ """Constants for Lovelace.""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING import voluptuous as vol @@ -14,9 +16,13 @@ from homeassistant.const import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import VolDictType -from homeassistant.util import slugify +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from . 
import LovelaceData DOMAIN = "lovelace" +LOVELACE_DATA: HassKey[LovelaceData] = HassKey(DOMAIN) DEFAULT_ICON = "hass:view-dashboard" @@ -84,18 +90,5 @@ STORAGE_DASHBOARD_CREATE_FIELDS: VolDictType = { STORAGE_DASHBOARD_UPDATE_FIELDS = DASHBOARD_BASE_UPDATE_FIELDS -def url_slug(value: Any) -> str: - """Validate value is a valid url slug.""" - if value is None: - raise vol.Invalid("Slug should not be None") - if "-" not in value: - raise vol.Invalid("Url path needs to contain a hyphen (-)") - str_value = str(value) - slg = slugify(str_value, separator="-") - if str_value == slg: - return str_value - raise vol.Invalid(f"invalid slug {value} (try {slg})") - - class ConfigNotFound(HomeAssistantError): """When no config available.""" diff --git a/homeassistant/components/lovelace/dashboard.py b/homeassistant/components/lovelace/dashboard.py index 411bbae9153..ddb54e7618f 100644 --- a/homeassistant/components/lovelace/dashboard.py +++ b/homeassistant/components/lovelace/dashboard.py @@ -7,7 +7,7 @@ import logging import os from pathlib import Path import time -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -27,6 +27,7 @@ from .const import ( DOMAIN, EVENT_LOVELACE_UPDATED, LOVELACE_CONFIG_FILE, + LOVELACE_DATA, MODE_STORAGE, MODE_YAML, STORAGE_DASHBOARD_CREATE_FIELDS, @@ -66,21 +67,25 @@ class LovelaceConfig(ABC): """Return mode of the lovelace config.""" @abstractmethod - async def async_get_info(self): + async def async_get_info(self) -> dict[str, Any]: """Return the config info.""" @abstractmethod async def async_load(self, force: bool) -> dict[str, Any]: """Load config.""" - async def async_save(self, config): + async def async_save(self, config: dict[str, Any]) -> None: """Save config.""" raise HomeAssistantError("Not supported") - async def async_delete(self): + async def async_delete(self) -> None: """Delete config.""" raise HomeAssistantError("Not supported") + @abstractmethod + async def async_json(self, force: bool) -> json_fragment: + """Return JSON representation of the config.""" + @callback def _config_updated(self) -> None: """Fire config updated event.""" @@ -112,7 +117,7 @@ class LovelaceStorage(LovelaceConfig): """Return mode of the lovelace config.""" return MODE_STORAGE - async def async_get_info(self): + async def async_get_info(self) -> dict[str, Any]: """Return the Lovelace storage info.""" data = self._data or await self._load() if data["config"] is None: @@ -128,7 +133,7 @@ class LovelaceStorage(LovelaceConfig): if (config := data["config"]) is None: raise ConfigNotFound - return config + return config # type: ignore[no-any-return] async def async_json(self, force: bool) -> json_fragment: """Return JSON representation of the config.""" @@ -138,19 +143,21 @@ class LovelaceStorage(LovelaceConfig): await self._load() return self._json_config or self._async_build_json() - async def async_save(self, config): + async def async_save(self, config: dict[str, Any]) -> None: """Save config.""" if self.hass.config.recovery_mode: raise HomeAssistantError("Saving not supported in recovery mode") if self._data is None: await self._load() + if TYPE_CHECKING: + assert self._data is not None self._data["config"] = config self._json_config = None self._config_updated() await self._store.async_save(self._data) - async def async_delete(self): + async def async_delete(self) -> None: """Delete config.""" if self.hass.config.recovery_mode: raise HomeAssistantError("Deleting not supported in recovery mode") @@ -194,7 +201,7 @@ class 
LovelaceYAML(LovelaceConfig): """Return mode of the lovelace config.""" return MODE_YAML - async def async_get_info(self): + async def async_get_info(self) -> dict[str, Any]: """Return the YAML storage mode.""" try: config = await self.async_load(False) @@ -250,7 +257,7 @@ class LovelaceYAML(LovelaceConfig): return is_updated, config, json -def _config_info(mode, config): +def _config_info(mode: str, config: dict[str, Any]) -> dict[str, Any]: """Generate info about the config.""" return { "mode": mode, @@ -264,7 +271,7 @@ class DashboardsCollection(collection.DictStorageCollection): CREATE_SCHEMA = vol.Schema(STORAGE_DASHBOARD_CREATE_FIELDS) UPDATE_SCHEMA = vol.Schema(STORAGE_DASHBOARD_UPDATE_FIELDS) - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialize the dashboards collection.""" super().__init__( storage.Store(hass, DASHBOARDS_STORAGE_VERSION, DASHBOARDS_STORAGE_KEY), @@ -282,12 +289,12 @@ class DashboardsCollection(collection.DictStorageCollection): if url_path in self.hass.data[DATA_PANELS]: raise vol.Invalid("Panel url path needs to be unique") - return self.CREATE_SCHEMA(data) + return self.CREATE_SCHEMA(data) # type: ignore[no-any-return] @callback def _get_suggested_id(self, info: dict) -> str: """Suggest an ID based on the config.""" - return info[CONF_URL_PATH] + return info[CONF_URL_PATH] # type: ignore[no-any-return] async def _update_data(self, item: dict, update_data: dict) -> dict: """Return a new updated data object.""" @@ -315,7 +322,7 @@ class DashboardsCollectionWebSocket(collection.DictStorageCollectionWebsocket): msg["id"], [ dashboard.config - for dashboard in hass.data[DOMAIN]["dashboards"].values() + for dashboard in hass.data[LOVELACE_DATA].dashboards.values() if dashboard.config ], ) diff --git a/homeassistant/components/lovelace/resources.py b/homeassistant/components/lovelace/resources.py index 316a31e8e9d..96f84ccbc60 100644 --- a/homeassistant/components/lovelace/resources.py +++ b/homeassistant/components/lovelace/resources.py @@ -34,11 +34,11 @@ class ResourceYAMLCollection: loaded = True - def __init__(self, data): + def __init__(self, data: list[dict[str, Any]]) -> None: """Initialize a resource YAML collection.""" self.data = data - async def async_get_info(self): + async def async_get_info(self) -> dict[str, int]: """Return the resources info for YAML mode.""" return {"resources": len(self.async_items() or [])} @@ -62,7 +62,7 @@ class ResourceStorageCollection(collection.DictStorageCollection): ) self.ll_config = ll_config - async def async_get_info(self): + async def async_get_info(self) -> dict[str, int]: """Return the resources info for YAML mode.""" if not self.loaded: await self.async_load() diff --git a/homeassistant/components/lovelace/system_health.py b/homeassistant/components/lovelace/system_health.py index 1e703768ae6..b629614d10d 100644 --- a/homeassistant/components/lovelace/system_health.py +++ b/homeassistant/components/lovelace/system_health.py @@ -1,12 +1,13 @@ """Provide info to system health.""" import asyncio +from typing import Any from homeassistant.components import system_health from homeassistant.const import CONF_MODE from homeassistant.core import HomeAssistant, callback -from .const import DOMAIN, MODE_AUTO, MODE_STORAGE, MODE_YAML +from .const import LOVELACE_DATA, MODE_AUTO, MODE_STORAGE, MODE_YAML @callback @@ -17,15 +18,17 @@ def async_register( register.async_register_info(system_health_info, "/config/lovelace") -async def system_health_info(hass): +async def 
system_health_info(hass: HomeAssistant) -> dict[str, Any]: """Get info for the info page.""" - health_info = {"dashboards": len(hass.data[DOMAIN]["dashboards"])} - health_info.update(await hass.data[DOMAIN]["resources"].async_get_info()) + health_info: dict[str, Any] = { + "dashboards": len(hass.data[LOVELACE_DATA].dashboards) + } + health_info.update(await hass.data[LOVELACE_DATA].resources.async_get_info()) dashboards_info = await asyncio.gather( *( - hass.data[DOMAIN]["dashboards"][dashboard].async_get_info() - for dashboard in hass.data[DOMAIN]["dashboards"] + hass.data[LOVELACE_DATA].dashboards[dashboard].async_get_info() + for dashboard in hass.data[LOVELACE_DATA].dashboards ) ) @@ -39,7 +42,7 @@ async def system_health_info(hass): else: health_info[key] = dashboard[key] - if hass.data[DOMAIN][CONF_MODE] == MODE_YAML: + if hass.data[LOVELACE_DATA].mode == MODE_YAML: health_info[CONF_MODE] = MODE_YAML elif MODE_STORAGE in modes: health_info[CONF_MODE] = MODE_STORAGE diff --git a/homeassistant/components/lovelace/websocket.py b/homeassistant/components/lovelace/websocket.py index e402ba92f16..5feb7deb449 100644 --- a/homeassistant/components/lovelace/websocket.py +++ b/homeassistant/components/lovelace/websocket.py @@ -2,8 +2,9 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable from functools import wraps -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -13,11 +14,21 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_fragment -from .const import CONF_URL_PATH, DOMAIN, ConfigNotFound -from .dashboard import LovelaceStorage +from .const import CONF_URL_PATH, LOVELACE_DATA, ConfigNotFound +from .dashboard import LovelaceConfig + +if TYPE_CHECKING: + from .resources import ResourceStorageCollection + +type AsyncLovelaceWebSocketCommandHandler[_R] = Callable[ + [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any], LovelaceConfig], + Awaitable[_R], +] -def _handle_errors(func): +def _handle_errors[_R]( + func: AsyncLovelaceWebSocketCommandHandler[_R], +) -> websocket_api.AsyncWebSocketCommandHandler: """Handle error with WebSocket calls.""" @wraps(func) @@ -27,7 +38,7 @@ def _handle_errors(func): msg: dict[str, Any], ) -> None: url_path = msg.get(CONF_URL_PATH) - config: LovelaceStorage | None = hass.data[DOMAIN]["dashboards"].get(url_path) + config = hass.data[LOVELACE_DATA].dashboards.get(url_path) if config is None: connection.send_error( @@ -74,7 +85,9 @@ async def websocket_lovelace_resources_impl( This function is called by both Storage and YAML mode WS handlers. 
""" - resources = hass.data[DOMAIN]["resources"] + resources = hass.data[LOVELACE_DATA].resources + if TYPE_CHECKING: + assert isinstance(resources, ResourceStorageCollection) if hass.config.safe_mode: connection.send_result(msg["id"], []) @@ -100,7 +113,7 @@ async def websocket_lovelace_config( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], - config: LovelaceStorage, + config: LovelaceConfig, ) -> json_fragment: """Send Lovelace UI config over WebSocket connection.""" return await config.async_json(msg["force"]) @@ -120,7 +133,7 @@ async def websocket_lovelace_save_config( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], - config: LovelaceStorage, + config: LovelaceConfig, ) -> None: """Save Lovelace UI configuration.""" await config.async_save(msg["config"]) @@ -139,7 +152,7 @@ async def websocket_lovelace_delete_config( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], - config: LovelaceStorage, + config: LovelaceConfig, ) -> None: """Delete Lovelace UI configuration.""" await config.async_delete() diff --git a/homeassistant/components/matter/icons.json b/homeassistant/components/matter/icons.json index ef29601b831..bd8665eb18b 100644 --- a/homeassistant/components/matter/icons.json +++ b/homeassistant/components/matter/icons.json @@ -36,6 +36,11 @@ } } }, + "select": { + "temperature_level": { + "default": "mdi:thermometer" + } + }, "sensor": { "contamination_state": { "default": "mdi:air-filter" @@ -61,6 +66,15 @@ "battery_replacement_description": { "default": "mdi:battery-sync-outline" } + }, + "switch": { + "child_lock": { + "default": "mdi:lock", + "state": { + "on": "mdi:lock", + "off": "mdi:lock-off" + } + } } } } diff --git a/homeassistant/components/matter/number.py b/homeassistant/components/matter/number.py index cc312cdc66a..22929c60b89 100644 --- a/homeassistant/components/matter/number.py +++ b/homeassistant/components/matter/number.py @@ -15,7 +15,13 @@ from homeassistant.components.number import ( NumberMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform, UnitOfLength, UnitOfTime +from homeassistant.const import ( + EntityCategory, + Platform, + UnitOfLength, + UnitOfTemperature, + UnitOfTime, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -155,4 +161,25 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterNumber, required_attributes=(custom_clusters.EveCluster.Attributes.Altitude,), ), + MatterDiscoverySchema( + platform=Platform.NUMBER, + entity_description=MatterNumberEntityDescription( + key="EveTemperatureOffset", + device_class=NumberDeviceClass.TEMPERATURE, + entity_category=EntityCategory.CONFIG, + translation_key="temperature_offset", + native_max_value=25, + native_min_value=-25, + native_step=0.5, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + measurement_to_ha=lambda x: None if x is None else x / 10, + ha_to_native_value=lambda x: round(x * 10), + mode=NumberMode.BOX, + ), + entity_class=MatterNumber, + required_attributes=( + clusters.Thermostat.Attributes.LocalTemperatureCalibration, + ), + vendor_id=(4874,), + ), ] diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index 1a2fc36c014..317c8515d4b 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -2,10 +2,12 @@ from __future__ import annotations 
+from collections.abc import Callable from dataclasses import dataclass -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from chip.clusters import Objects as clusters +from chip.clusters.ClusterObjects import ClusterAttributeDescriptor, ClusterCommand from chip.clusters.Types import Nullable from matter_server.common.helpers.util import create_attribute_path_from_attribute @@ -47,7 +49,18 @@ class MatterSelectEntityDescription(SelectEntityDescription, MatterEntityDescrip """Describe Matter select entities.""" -class MatterSelectEntity(MatterEntity, SelectEntity): +@dataclass(frozen=True, kw_only=True) +class MatterListSelectEntityDescription(MatterSelectEntityDescription): + """Describe Matter select entities for MatterListSelectEntity.""" + + # command: a callback to create the command to send to the device + # the callback's argument will be the index of the selected list value + command: Callable[[int], ClusterCommand] + # list attribute: the attribute descriptor to get the list of values (= list of strings) + list_attribute: type[ClusterAttributeDescriptor] + + +class MatterAttributeSelectEntity(MatterEntity, SelectEntity): """Representation of a select entity from Matter Attribute read/write.""" entity_description: MatterSelectEntityDescription @@ -76,7 +89,7 @@ class MatterSelectEntity(MatterEntity, SelectEntity): self._attr_current_option = value_convert(value) -class MatterModeSelectEntity(MatterSelectEntity): +class MatterModeSelectEntity(MatterAttributeSelectEntity): """Representation of a select entity from Matter (Mode) Cluster attribute(s).""" async def async_select_option(self, option: str) -> None: @@ -111,6 +124,37 @@ class MatterModeSelectEntity(MatterSelectEntity): self._attr_name = desc +class MatterListSelectEntity(MatterEntity, SelectEntity): + """Representation of a select entity from Matter list and selected item Cluster attribute(s).""" + + entity_description: MatterListSelectEntityDescription + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + option_id = self._attr_options.index(option) + await self.matter_client.send_device_command( + node_id=self._endpoint.node.node_id, + endpoint_id=self._endpoint.endpoint_id, + command=self.entity_description.command(option_id), + ) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + list_values = cast( + list[str], + self.get_matter_attribute_value(self.entity_description.list_attribute), + ) + self._attr_options = list_values + current_option_idx: int = self.get_matter_attribute_value( + self._entity_info.primary_attribute + ) + try: + self._attr_current_option = list_values[current_option_idx] + except IndexError: + self._attr_current_option = None + + # Discovery schema(s) to map Matter Attributes to HA entities DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( @@ -230,7 +274,7 @@ DISCOVERY_SCHEMAS = [ "previous": None, }.get, ), - entity_class=MatterSelectEntity, + entity_class=MatterAttributeSelectEntity, required_attributes=(clusters.OnOff.Attributes.StartUpOnOff,), ), MatterDiscoverySchema( @@ -251,7 +295,44 @@ DISCOVERY_SCHEMAS = [ "low": 2, }.get, ), - entity_class=MatterSelectEntity, + entity_class=MatterAttributeSelectEntity, required_attributes=(clusters.SmokeCoAlarm.Attributes.SmokeSensitivityLevel,), ), + MatterDiscoverySchema( + platform=Platform.SELECT, + entity_description=MatterSelectEntityDescription( + key="TrvTemperatureDisplayMode", + entity_category=EntityCategory.CONFIG, + 
translation_key="temperature_display_mode", + options=["Celsius", "Fahrenheit"], + measurement_to_ha={ + 0: "Celsius", + 1: "Fahrenheit", + }.get, + ha_to_native_value={ + "Celsius": 0, + "Fahrenheit": 1, + }.get, + ), + entity_class=MatterAttributeSelectEntity, + required_attributes=( + clusters.ThermostatUserInterfaceConfiguration.Attributes.TemperatureDisplayMode, + ), + ), + MatterDiscoverySchema( + platform=Platform.SELECT, + entity_description=MatterListSelectEntityDescription( + key="TemperatureControlSelectedTemperatureLevel", + translation_key="temperature_level", + command=lambda selected_index: clusters.TemperatureControl.Commands.SetTemperature( + targetTemperatureLevel=selected_index + ), + list_attribute=clusters.TemperatureControl.Attributes.SupportedTemperatureLevels, + ), + entity_class=MatterListSelectEntity, + required_attributes=( + clusters.TemperatureControl.Attributes.SelectedTemperatureLevel, + clusters.TemperatureControl.Attributes.SupportedTemperatureLevels, + ), + ), ] diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index 847c9439b81..d8fe56278df 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -244,6 +244,8 @@ DISCOVERY_SCHEMAS = [ required_attributes=( clusters.PowerSource.Attributes.BatReplacementDescription, ), + # Some manufacturers returns an empty string + value_is_not="", ), MatterDiscoverySchema( platform=Platform.SENSOR, diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index ca15538997e..4054adba530 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -161,6 +161,9 @@ }, "altitude": { "name": "Altitude above Sea Level" + }, + "temperature_offset": { + "name": "Temperature offset" } }, "light": { @@ -196,6 +199,12 @@ "toggle": "[%key:common::action::toggle%]", "previous": "Previous" } + }, + "temperature_level": { + "name": "Temperature level" + }, + "temperature_display_mode": { + "name": "Temperature display mode" } }, "sensor": { @@ -256,6 +265,9 @@ }, "power": { "name": "Power" + }, + "child_lock": { + "name": "Child lock" } }, "vacuum": { diff --git a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index 75269de953c..2a1e6d59a06 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -2,10 +2,12 @@ from __future__ import annotations +from dataclasses import dataclass from typing import Any from chip.clusters import Objects as clusters from matter_server.client.models import device_types +from matter_server.common.helpers.util import create_attribute_path_from_attribute from homeassistant.components.switch import ( SwitchDeviceClass, @@ -13,11 +15,11 @@ from homeassistant.components.switch import ( SwitchEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import MatterEntity +from .entity import MatterEntity, MatterEntityDescription from .helpers import get_matter from .models import MatterDiscoverySchema @@ -61,6 +63,49 @@ class MatterSwitch(MatterEntity, SwitchEntity): ) +@dataclass(frozen=True) +class MatterNumericSwitchEntityDescription( + SwitchEntityDescription, 
MatterEntityDescription +): + """Describe Matter Numeric Switch entities.""" + + +class MatterNumericSwitch(MatterSwitch): + """Representation of a Matter Enum Attribute as a Switch entity.""" + + entity_description: MatterNumericSwitchEntityDescription + + async def _async_set_native_value(self, value: bool) -> None: + """Update the current value.""" + matter_attribute = self._entity_info.primary_attribute + if value_convert := self.entity_description.ha_to_native_value: + send_value = value_convert(value) + await self.matter_client.write_attribute( + node_id=self._endpoint.node.node_id, + attribute_path=create_attribute_path_from_attribute( + self._endpoint.endpoint_id, + matter_attribute, + ), + value=send_value, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn switch on.""" + await self._async_set_native_value(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn switch off.""" + await self._async_set_native_value(False) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + value = self.get_matter_attribute_value(self._entity_info.primary_attribute) + if value_convert := self.entity_description.measurement_to_ha: + value = value_convert(value) + self._attr_is_on = value + + # Discovery schema(s) to map Matter Attributes to HA entities DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( @@ -139,4 +184,25 @@ DISCOVERY_SCHEMAS = [ device_types.Speaker, ), ), + MatterDiscoverySchema( + platform=Platform.SWITCH, + entity_description=MatterNumericSwitchEntityDescription( + key="EveTrvChildLock", + entity_category=EntityCategory.CONFIG, + translation_key="child_lock", + measurement_to_ha={ + 0: False, + 1: True, + }.get, + ha_to_native_value={ + False: 0, + True: 1, + }.get, + ), + entity_class=MatterNumericSwitch, + required_attributes=( + clusters.ThermostatUserInterfaceConfiguration.Attributes.KeypadLockout, + ), + vendor_id=(4874,), + ), ] diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index a7b32119917..1a331e16482 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations import logging -from typing import cast import voluptuous as vol @@ -143,7 +142,7 @@ from .const import ( UDP, DataType, ) -from .modbus import ModbusHub, async_modbus_setup +from .modbus import DATA_MODBUS_HUBS, ModbusHub, async_modbus_setup from .validators import ( duplicate_fan_mode_validator, duplicate_swing_mode_validator, @@ -458,7 +457,7 @@ CONFIG_SCHEMA = vol.Schema( def get_hub(hass: HomeAssistant, name: str) -> ModbusHub: """Return modbus hub with name.""" - return cast(ModbusHub, hass.data[DOMAIN][name]) + return hass.data[DATA_MODBUS_HUBS][name] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -468,12 +467,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def _reload_config(call: Event | ServiceCall) -> None: """Reload Modbus.""" - if DOMAIN not in hass.data: + if DATA_MODBUS_HUBS not in hass.data: _LOGGER.error("Modbus cannot reload, because it was never loaded") return - hubs = hass.data[DOMAIN] - for name in hubs: - await hubs[name].async_close() + hubs = hass.data[DATA_MODBUS_HUBS] + for hub in hubs.values(): + await hub.async_close() reset_platforms = async_get_platforms(hass, DOMAIN) for reset_platform in reset_platforms: _LOGGER.debug("Reload modbus resetting platform: %s", reset_platform.domain) @@ -487,7 +486,4 @@ async 
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_admin_service(hass, DOMAIN, SERVICE_RELOAD, _reload_config) - return await async_modbus_setup( - hass, - config, - ) + return await async_modbus_setup(hass, config) diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index 97ade53762b..28d1be24587 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ b/homeassistant/components/modbus/binary_sensor.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime import logging from typing import Any @@ -104,17 +103,13 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): if state := await self.async_get_last_state(): self._attr_is_on = state.state == STATE_ON - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Update the state of the sensor.""" # do not allow multiple active calls to the same platform - if self._call_active: - return - self._call_active = True result = await self._hub.async_pb_call( self._slave, self._address, self._count, self._input_type ) - self._call_active = False if result is None: self._attr_available = False self._result = [] @@ -126,7 +121,6 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): self._result = result.registers self._attr_is_on = bool(self._result[0] & 1) - self.async_write_ha_state() if self._coordinator: self._coordinator.async_set_updated_data(self._result) @@ -159,7 +153,6 @@ class SlaveSensor( """Handle entity which will be added.""" if state := await self.async_get_last_state(): self._attr_is_on = state.state == STATE_ON - self.async_write_ha_state() await super().async_added_to_hass() @callback diff --git a/homeassistant/components/modbus/climate.py b/homeassistant/components/modbus/climate.py index ba09bd08377..e1a2688048d 100644 --- a/homeassistant/components/modbus/climate.py +++ b/homeassistant/components/modbus/climate.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime import logging import struct from typing import Any, cast @@ -113,15 +112,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Read configuration and create Modbus climate.""" - if discovery_info is None: + if discovery_info is None or not (climates := discovery_info[CONF_CLIMATES]): return - - entities = [] - for entity in discovery_info[CONF_CLIMATES]: - hub: ModbusHub = get_hub(hass, discovery_info[CONF_NAME]) - entities.append(ModbusThermostat(hass, hub, entity)) - - async_add_entities(entities) + hub = get_hub(hass, discovery_info[CONF_NAME]) + async_add_entities(ModbusThermostat(hass, hub, config) for config in climates) class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): @@ -313,7 +307,7 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): ) break - await self.async_update() + await self._async_update_write_state() async def async_set_fan_mode(self, fan_mode: str) -> None: """Set new target fan mode.""" @@ -335,7 +329,7 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): CALL_TYPE_WRITE_REGISTER, ) - await self.async_update() + await self._async_update_write_state() async def async_set_swing_mode(self, swing_mode: str) -> None: """Set new target swing mode.""" @@ -358,7 +352,7 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): CALL_TYPE_WRITE_REGISTER, ) break - 
await self.async_update() + await self._async_update_write_state() async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" @@ -413,9 +407,9 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): CALL_TYPE_WRITE_REGISTERS, ) self._attr_available = result is not None - await self.async_update() + await self._async_update_write_state() - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Update Target & Current Temperature.""" # remark "now" is a dummy parameter to avoid problems with # async_track_time_interval @@ -490,8 +484,6 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): if onoff == self._hvac_off_value: self._attr_hvac_mode = HVACMode.OFF - self.async_write_ha_state() - async def _async_read_register( self, register_type: str, register: int, raw: bool | None = False ) -> float | None: diff --git a/homeassistant/components/modbus/cover.py b/homeassistant/components/modbus/cover.py index eb9dac58900..5e7b008ff7c 100644 --- a/homeassistant/components/modbus/cover.py +++ b/homeassistant/components/modbus/cover.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime from typing import Any from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState @@ -37,15 +36,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Read configuration and create Modbus cover.""" - if discovery_info is None: + if discovery_info is None or not (covers := discovery_info[CONF_COVERS]): return - - covers = [] - for cover in discovery_info[CONF_COVERS]: - hub: ModbusHub = get_hub(hass, discovery_info[CONF_NAME]) - covers.append(ModbusCover(hass, hub, cover)) - - async_add_entities(covers) + hub = get_hub(hass, discovery_info[CONF_NAME]) + async_add_entities(ModbusCover(hass, hub, config) for config in covers) class ModbusCover(BasePlatform, CoverEntity, RestoreEntity): @@ -117,7 +111,7 @@ class ModbusCover(BasePlatform, CoverEntity, RestoreEntity): self._slave, self._write_address, self._state_open, self._write_type ) self._attr_available = result is not None - await self.async_update() + await self._async_update_write_state() async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" @@ -125,9 +119,9 @@ class ModbusCover(BasePlatform, CoverEntity, RestoreEntity): self._slave, self._write_address, self._state_closed, self._write_type ) self._attr_available = result is not None - await self.async_update() + await self._async_update_write_state() - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Update the state of the cover.""" # remark "now" is a dummy parameter to avoid problems with # async_track_time_interval @@ -136,11 +130,9 @@ class ModbusCover(BasePlatform, CoverEntity, RestoreEntity): ) if result is None: self._attr_available = False - self.async_write_ha_state() return self._attr_available = True if self._input_type == CALL_TYPE_COIL: self._set_attr_state(bool(result.bits[0] & 1)) else: self._set_attr_state(int(result.registers[0])) - self.async_write_ha_state() diff --git a/homeassistant/components/modbus/entity.py b/homeassistant/components/modbus/entity.py index 03bcc98de40..35b7c02aa05 100644 --- a/homeassistant/components/modbus/entity.py +++ b/homeassistant/components/modbus/entity.py @@ -3,6 +3,7 @@ from __future__ import annotations from abc import abstractmethod +import 
asyncio from collections.abc import Callable from datetime import datetime, timedelta import logging @@ -79,12 +80,14 @@ class BasePlatform(Entity): """Initialize the Modbus binary sensor.""" self._hub = hub - self._slave = entry.get(CONF_SLAVE) or entry.get(CONF_DEVICE_ADDRESS, 0) + if (conf_slave := entry.get(CONF_SLAVE)) is not None: + self._slave = conf_slave + else: + self._slave = entry.get(CONF_DEVICE_ADDRESS, 1) self._address = int(entry[CONF_ADDRESS]) self._input_type = entry[CONF_INPUT_TYPE] self._value: str | None = None self._scan_interval = int(entry[CONF_SCAN_INTERVAL]) - self._call_active = False self._cancel_timer: Callable[[], None] | None = None self._cancel_call: Callable[[], None] | None = None @@ -107,37 +110,73 @@ class BasePlatform(Entity): self._max_value = get_optional_numeric_config(CONF_MAX_VALUE) self._nan_value = entry.get(CONF_NAN_VALUE) self._zero_suppress = get_optional_numeric_config(CONF_ZERO_SUPPRESS) + self._update_lock = asyncio.Lock() @abstractmethod - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Virtual function to be overwritten.""" + async def async_update(self, now: datetime | None = None) -> None: + """Update the entity state.""" + async with self._update_lock: + await self._async_update() + + async def _async_update_write_state(self) -> None: + """Update the entity state and write it to the state machine.""" + await self.async_update() + self.async_write_ha_state() + + async def _async_update_if_not_in_progress( + self, now: datetime | None = None + ) -> None: + """Update the entity state if not already in progress.""" + if self._update_lock.locked(): + _LOGGER.debug("Update for entity %s is already in progress", self.name) + return + await self._async_update_write_state() + @callback def async_run(self) -> None: """Remote start entity.""" - self.async_hold(update=False) - self._cancel_call = async_call_later( - self.hass, timedelta(milliseconds=100), self.async_update - ) + self._async_cancel_update_polling() + self._async_schedule_future_update(0.1) if self._scan_interval > 0: self._cancel_timer = async_track_time_interval( - self.hass, self.async_update, timedelta(seconds=self._scan_interval) + self.hass, + self._async_update_if_not_in_progress, + timedelta(seconds=self._scan_interval), ) self._attr_available = True self.async_write_ha_state() @callback - def async_hold(self, update: bool = True) -> None: - """Remote stop entity.""" + def _async_schedule_future_update(self, delay: float) -> None: + """Schedule an update in the future.""" + self._async_cancel_future_pending_update() + self._cancel_call = async_call_later( + self.hass, delay, self._async_update_if_not_in_progress + ) + + @callback + def _async_cancel_future_pending_update(self) -> None: + """Cancel a future pending update.""" if self._cancel_call: self._cancel_call() self._cancel_call = None + + def _async_cancel_update_polling(self) -> None: + """Cancel the polling.""" if self._cancel_timer: self._cancel_timer() self._cancel_timer = None - if update: - self._attr_available = False - self.async_write_ha_state() + + @callback + def async_hold(self) -> None: + """Remote stop entity.""" + self._async_cancel_future_pending_update() + self._async_cancel_update_polling() + self._attr_available = False + self.async_write_ha_state() async def async_base_added_to_hass(self) -> None: """Handle entity which will be added.""" @@ -312,6 +351,7 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity): self._attr_is_on = 
True elif state.state == STATE_OFF: self._attr_is_on = False + await super().async_added_to_hass() async def async_turn(self, command: int) -> None: """Evaluate switch result.""" @@ -330,34 +370,29 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity): return if self._verify_delay: - async_call_later(self.hass, self._verify_delay, self.async_update) - else: - await self.async_update() + self._async_schedule_future_update(self._verify_delay) + return + + await self._async_update_write_state() async def async_turn_off(self, **kwargs: Any) -> None: """Set switch off.""" await self.async_turn(self._command_off) - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Update the entity state.""" # remark "now" is a dummy parameter to avoid problems with # async_track_time_interval if not self._verify_active: self._attr_available = True - self.async_write_ha_state() return # do not allow multiple active calls to the same platform - if self._call_active: - return - self._call_active = True result = await self._hub.async_pb_call( self._slave, self._verify_address, 1, self._verify_type ) - self._call_active = False if result is None: self._attr_available = False - self.async_write_ha_state() return self._attr_available = True @@ -379,4 +414,3 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity): self._verify_address, value, ) - self.async_write_ha_state() diff --git a/homeassistant/components/modbus/fan.py b/homeassistant/components/modbus/fan.py index bed8ff102bb..8636ef4521a 100644 --- a/homeassistant/components/modbus/fan.py +++ b/homeassistant/components/modbus/fan.py @@ -25,14 +25,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Read configuration and create Modbus fans.""" - if discovery_info is None: + if discovery_info is None or not (fans := discovery_info[CONF_FANS]): return - fans = [] - - for entry in discovery_info[CONF_FANS]: - hub: ModbusHub = get_hub(hass, discovery_info[CONF_NAME]) - fans.append(ModbusFan(hass, hub, entry)) - async_add_entities(fans) + hub = get_hub(hass, discovery_info[CONF_NAME]) + async_add_entities(ModbusFan(hass, hub, config) for config in fans) class ModbusFan(BaseSwitch, FanEntity): diff --git a/homeassistant/components/modbus/light.py b/homeassistant/components/modbus/light.py index 42745c2bb78..ce1c881733e 100644 --- a/homeassistant/components/modbus/light.py +++ b/homeassistant/components/modbus/light.py @@ -12,7 +12,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import get_hub from .entity import BaseSwitch -from .modbus import ModbusHub PARALLEL_UPDATES = 1 @@ -24,14 +23,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Read configuration and create Modbus lights.""" - if discovery_info is None: + if discovery_info is None or not (lights := discovery_info[CONF_LIGHTS]): return - - lights = [] - for entry in discovery_info[CONF_LIGHTS]: - hub: ModbusHub = get_hub(hass, discovery_info[CONF_NAME]) - lights.append(ModbusLight(hass, hub, entry)) - async_add_entities(lights) + hub = get_hub(hass, discovery_info[CONF_NAME]) + async_add_entities(ModbusLight(hass, hub, config) for config in lights) class ModbusLight(BaseSwitch, LightEntity): diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 8c8a879ead6..319c68f50f0 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -35,6 +35,7 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import ( ATTR_ADDRESS, @@ -70,50 +71,59 @@ from .const import ( from .validators import check_config _LOGGER = logging.getLogger(__name__) +DATA_MODBUS_HUBS: HassKey[dict[str, ModbusHub]] = HassKey(DOMAIN) -ConfEntry = namedtuple("ConfEntry", "call_type attr func_name") # noqa: PYI024 -RunEntry = namedtuple("RunEntry", "attr func") # noqa: PYI024 +ConfEntry = namedtuple("ConfEntry", "call_type attr func_name value_attr_name") # noqa: PYI024 +RunEntry = namedtuple("RunEntry", "attr func value_attr_name") # noqa: PYI024 PB_CALL = [ ConfEntry( CALL_TYPE_COIL, "bits", "read_coils", + "count", ), ConfEntry( CALL_TYPE_DISCRETE, "bits", "read_discrete_inputs", + "count", ), ConfEntry( CALL_TYPE_REGISTER_HOLDING, "registers", "read_holding_registers", + "count", ), ConfEntry( CALL_TYPE_REGISTER_INPUT, "registers", "read_input_registers", + "count", ), ConfEntry( CALL_TYPE_WRITE_COIL, - "value", + "bits", "write_coil", + "value", ), ConfEntry( CALL_TYPE_WRITE_COILS, "count", "write_coils", + "values", ), ConfEntry( CALL_TYPE_WRITE_REGISTER, - "value", + "registers", "write_register", + "value", ), ConfEntry( CALL_TYPE_WRITE_REGISTERS, "count", "write_registers", + "values", ), ] @@ -128,14 +138,14 @@ async def async_modbus_setup( config[DOMAIN] = check_config(hass, config[DOMAIN]) if not config[DOMAIN]: return False - if DOMAIN in hass.data and config[DOMAIN] == []: - hubs = hass.data[DOMAIN] - for name in hubs: - if not await hubs[name].async_setup(): + if DATA_MODBUS_HUBS in hass.data and config[DOMAIN] == []: + hubs = hass.data[DATA_MODBUS_HUBS] + for hub in hubs.values(): + if not await hub.async_setup(): return False - hub_collect = hass.data[DOMAIN] + hub_collect = hass.data[DATA_MODBUS_HUBS] else: - hass.data[DOMAIN] = hub_collect = {} + hass.data[DATA_MODBUS_HUBS] = hub_collect = {} for conf_hub in config[DOMAIN]: my_hub = ModbusHub(hass, conf_hub) @@ -322,7 +332,9 @@ class ModbusHub: for entry in PB_CALL: func = getattr(self._client, entry.func_name) - self._pb_request[entry.call_type] = RunEntry(entry.attr, func) + self._pb_request[entry.call_type] = RunEntry( + entry.attr, func, entry.value_attr_name + ) self.hass.async_create_background_task( self.async_pb_connect(), "modbus-connect" @@ -368,10 +380,13 @@ class ModbusHub: 
self, slave: int | None, address: int, value: int | list[int], use_call: str ) -> ModbusPDU | None: """Call sync. pymodbus.""" - kwargs = {"slave": slave} if slave else {} + kwargs: dict[str, Any] = ( + {ATTR_SLAVE: slave} if slave is not None else {ATTR_SLAVE: 1} + ) entry = self._pb_request[use_call] + kwargs[entry.value_attr_name] = value try: - result: ModbusPDU = await entry.func(address, value, **kwargs) + result: ModbusPDU = await entry.func(address, **kwargs) except ModbusException as exception_error: error = f"Error: device: {slave} address: {address} -> {exception_error!s}" self._log_error(error) diff --git a/homeassistant/components/modbus/sensor.py b/homeassistant/components/modbus/sensor.py index d5a16c95cc4..2c2efb70d5a 100644 --- a/homeassistant/components/modbus/sensor.py +++ b/homeassistant/components/modbus/sensor.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime import logging from typing import Any @@ -106,7 +105,7 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity): if state: self._attr_native_value = state.native_value - async def async_update(self, now: datetime | None = None) -> None: + async def _async_update(self) -> None: """Update the state of the sensor.""" # remark "now" is a dummy parameter to avoid problems with # async_track_time_interval diff --git a/homeassistant/components/modbus/switch.py b/homeassistant/components/modbus/switch.py index 71413391a5f..44b0575d419 100644 --- a/homeassistant/components/modbus/switch.py +++ b/homeassistant/components/modbus/switch.py @@ -12,7 +12,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import get_hub from .entity import BaseSwitch -from .modbus import ModbusHub PARALLEL_UPDATES = 1 @@ -24,15 +23,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Read configuration and create Modbus switches.""" - switches = [] - - if discovery_info is None: + if discovery_info is None or not (switches := discovery_info[CONF_SWITCHES]): return - - for entry in discovery_info[CONF_SWITCHES]: - hub: ModbusHub = get_hub(hass, discovery_info[CONF_NAME]) - switches.append(ModbusSwitch(hass, hub, entry)) - async_add_entities(switches) + hub = get_hub(hass, discovery_info[CONF_NAME]) + async_add_entities(ModbusSwitch(hass, hub, config) for config in switches) class ModbusSwitch(BaseSwitch, SwitchEntity): diff --git a/homeassistant/components/myuplink/manifest.json b/homeassistant/components/myuplink/manifest.json index 8438d24194c..d3242115acb 100644 --- a/homeassistant/components/myuplink/manifest.json +++ b/homeassistant/components/myuplink/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/myuplink", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["myuplink==0.6.0"] + "requirements": ["myuplink==0.7.0"] } diff --git a/homeassistant/components/myuplink/sensor.py b/homeassistant/components/myuplink/sensor.py index ef827fc1fb1..fa50e8a7001 100644 --- a/homeassistant/components/myuplink/sensor.py +++ b/homeassistant/components/myuplink/sensor.py @@ -325,10 +325,10 @@ class MyUplinkEnumSensor(MyUplinkDevicePointSensor): } @property - def native_value(self) -> str: + def native_value(self) -> str | None: """Sensor state value for enum sensor.""" device_point = self.coordinator.data.points[self.device_id][self.point_id] - return self.options_map[str(int(device_point.value))] # type: ignore[no-any-return] + return 
self.options_map.get(str(int(device_point.value))) class MyUplinkEnumRawSensor(MyUplinkDevicePointSensor): diff --git a/homeassistant/components/netgear/router.py b/homeassistant/components/netgear/router.py index 1e4bf2480e9..d81f556193b 100644 --- a/homeassistant/components/netgear/router.py +++ b/homeassistant/components/netgear/router.py @@ -210,6 +210,12 @@ class NetgearRouter: for device in self.devices.values(): device["active"] = now - device["last_seen"] <= self._consider_home + if not device["active"]: + device["link_rate"] = None + device["signal"] = None + device["ip"] = None + device["ssid"] = None + device["conn_ap_mac"] = None if new_device: _LOGGER.debug("Netgear tracker: new device found") diff --git a/homeassistant/components/netgear/sensor.py b/homeassistant/components/netgear/sensor.py index 4751e58a6a1..d807f7aed0a 100644 --- a/homeassistant/components/netgear/sensor.py +++ b/homeassistant/components/netgear/sensor.py @@ -344,6 +344,11 @@ class NetgearSensorEntity(NetgearDeviceEntity, SensorEntity): self._attr_unique_id = f"{self._mac}-{attribute}" self._state = device.get(attribute) + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self._device.get(self._attribute) is not None + @property def native_value(self): """Return the state of the sensor.""" diff --git a/homeassistant/components/network/strings.json b/homeassistant/components/network/strings.json new file mode 100644 index 00000000000..6aca7343221 --- /dev/null +++ b/homeassistant/components/network/strings.json @@ -0,0 +1,10 @@ +{ + "system_health": { + "info": { + "adapters": "Adapters", + "ipv4_addresses": "IPv4 addresses", + "ipv6_addresses": "IPv6 addresses", + "announce_addresses": "Announce addresses" + } + } +} diff --git a/homeassistant/components/network/system_health.py b/homeassistant/components/network/system_health.py new file mode 100644 index 00000000000..ebabe055539 --- /dev/null +++ b/homeassistant/components/network/system_health.py @@ -0,0 +1,53 @@ +"""Provide info to system health.""" + +from typing import Any + +from homeassistant.components import system_health +from homeassistant.core import HomeAssistant, callback + +from . 
import Adapter, async_get_adapters, async_get_announce_addresses +from .models import IPv4ConfiguredAddress, IPv6ConfiguredAddress + + +@callback +def async_register( + hass: HomeAssistant, register: system_health.SystemHealthRegistration +) -> None: + """Register system health callbacks.""" + register.async_register_info(system_health_info, "/config/network") + + +def _format_ips(ips: list[IPv4ConfiguredAddress] | list[IPv6ConfiguredAddress]) -> str: + return ", ".join([f"{ip['address']}/{ip['network_prefix']!s}" for ip in ips]) + + +def _get_adapter_info(adapter: Adapter) -> str: + state = "enabled" if adapter["enabled"] else "disabled" + default = ", default" if adapter["default"] else "" + auto = ", auto" if adapter["auto"] else "" + return f"{adapter['name']} ({state}{default}{auto})" + + +async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: + """Get info for the info page.""" + + adapters = await async_get_adapters(hass) + data: dict[str, Any] = { + # k: v for adapter in adapters for k, v in _get_adapter_info(adapter).items() + "adapters": ", ".join([_get_adapter_info(adapter) for adapter in adapters]), + "ipv4_addresses": ", ".join( + [ + f"{adapter['name']} ({_format_ips(adapter['ipv4'])})" + for adapter in adapters + ] + ), + "ipv6_addresses": ", ".join( + [ + f"{adapter['name']} ({_format_ips(adapter['ipv6'])})" + for adapter in adapters + ] + ), + "announce_addresses": ", ".join(await async_get_announce_addresses(hass)), + } + + return data diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index a75b0d72dca..57f83180eb0 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/niko_home_control", "iot_class": "local_push", "loggers": ["nikohomecontrol"], - "requirements": ["nhc==0.3.4"] + "requirements": ["nhc==0.3.9"] } diff --git a/homeassistant/components/nissan_leaf/strings.json b/homeassistant/components/nissan_leaf/strings.json index d733e39a0fc..78335ab4c14 100644 --- a/homeassistant/components/nissan_leaf/strings.json +++ b/homeassistant/components/nissan_leaf/strings.json @@ -2,17 +2,17 @@ "services": { "start_charge": { "name": "Start charge", - "description": "Starts the vehicle charging. It must be plugged in first!\n.", + "description": "Starts the vehicle charging. It must be plugged in first!", "fields": { "vin": { "name": "VIN", - "description": "The vehicle identification number (VIN) of the vehicle, 17 characters\n." + "description": "The vehicle identification number (VIN) of the vehicle, 17 characters." } } }, "update": { "name": "Update", - "description": "Fetches the last state of the vehicle of all your accounts, requesting an update from of the state from the car if possible.\n.", + "description": "Fetches the last state of the vehicle of all your accounts, requesting an update of the state from the car if possible.", "fields": { "vin": { "name": "[%key:component::nissan_leaf::services::start_charge::fields::vin::name%]", diff --git a/homeassistant/components/nmap_tracker/strings.json b/homeassistant/components/nmap_tracker/strings.json index ef660c7e991..3cbbea007b1 100644 --- a/homeassistant/components/nmap_tracker/strings.json +++ b/homeassistant/components/nmap_tracker/strings.json @@ -21,7 +21,7 @@ "config": { "step": { "user": { - "description": "Configure hosts to be scanned by Nmap. 
Network address and excludes can be IP Addresses (192.168.1.1), IP Networks (192.168.0.0/24) or IP Ranges (192.168.1.0-32).", + "description": "Configure hosts to be scanned by Nmap. Network address and excludes can be IP addresses (192.168.1.1), IP networks (192.168.0.0/24) or IP ranges (192.168.1.0-32).", "data": { "hosts": "Network addresses (comma separated) to scan", "home_interval": "Minimum number of minutes between scans of active devices (preserve battery)", @@ -31,7 +31,7 @@ } }, "error": { - "invalid_hosts": "Invalid Hosts" + "invalid_hosts": "Invalid hosts" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py index 770d18e823a..308664ba0ad 100644 --- a/homeassistant/components/ohme/const.py +++ b/homeassistant/components/ohme/const.py @@ -3,4 +3,10 @@ from homeassistant.const import Platform DOMAIN = "ohme" -PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] +PLATFORMS = [ + Platform.BUTTON, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, + Platform.TIME, +] diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json index 6fa7925aa02..a6b04004833 100644 --- a/homeassistant/components/ohme/icons.json +++ b/homeassistant/components/ohme/icons.json @@ -5,6 +5,11 @@ "default": "mdi:check-decagram" } }, + "number": { + "target_percentage": { + "default": "mdi:battery-heart" + } + }, "sensor": { "status": { "default": "mdi:car", @@ -36,6 +41,11 @@ "off": "mdi:sleep-off" } } + }, + "time": { + "target_time": { + "default": "mdi:clock-end" + } } }, "services": { diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index 935975502d0..67c41550491 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["ohme==1.2.3"] + "requirements": ["ohme==1.2.5"] } diff --git a/homeassistant/components/ohme/number.py b/homeassistant/components/ohme/number.py new file mode 100644 index 00000000000..d618d4a873b --- /dev/null +++ b/homeassistant/components/ohme/number.py @@ -0,0 +1,77 @@ +"""Platform for number.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from ohme import ApiException, OhmeApiClient + +from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .const import DOMAIN +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class OhmeNumberDescription(OhmeEntityDescription, NumberEntityDescription): + """Class describing Ohme number entities.""" + + set_fn: Callable[[OhmeApiClient, float], Awaitable[None]] + value_fn: Callable[[OhmeApiClient], float] + + +NUMBER_DESCRIPTION = [ + OhmeNumberDescription( + key="target_percentage", + translation_key="target_percentage", + value_fn=lambda client: client.target_soc, + set_fn=lambda client, value: client.async_set_target(target_percent=value), + native_min_value=0, + native_max_value=100, + native_step=1, + native_unit_of_measurement=PERCENTAGE, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up numbers.""" + coordinators = config_entry.runtime_data + coordinator = coordinators.charge_session_coordinator + + async_add_entities( + OhmeNumber(coordinator, description) + for description in NUMBER_DESCRIPTION + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeNumber(OhmeEntity, NumberEntity): + """Generic number entity for Ohme.""" + + entity_description: OhmeNumberDescription + + @property + def native_value(self) -> float: + """Return the current value of the number.""" + return self.entity_description.value_fn(self.coordinator.client) + + async def async_set_native_value(self, value: float) -> None: + """Set the number value.""" + try: + await self.entity_description.set_fn(self.coordinator.client, value) + except ApiException as e: + raise HomeAssistantError( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 4c45f8eca8c..84f62ba65ab 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -50,6 +50,11 @@ "name": "Approve charge" } }, + "number": { + "target_percentage": { + "name": "Target percentage" + } + }, "sensor": { "status": { "name": "Status", @@ -78,6 +83,11 @@ "sleep_when_inactive": { "name": "Sleep when inactive" } + }, + "time": { + "target_time": { + "name": "Target time" + } } }, "exceptions": { diff --git a/homeassistant/components/ohme/time.py b/homeassistant/components/ohme/time.py new file mode 100644 index 00000000000..a7de913ef8e --- /dev/null +++ b/homeassistant/components/ohme/time.py @@ -0,0 +1,77 @@ +"""Platform for time.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from datetime import time + +from ohme import ApiException, OhmeApiClient + +from homeassistant.components.time import TimeEntity, TimeEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .const import DOMAIN +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class OhmeTimeDescription(OhmeEntityDescription, TimeEntityDescription): + """Class describing Ohme time entities.""" + + set_fn: Callable[[OhmeApiClient, time], Awaitable[None]] + value_fn: Callable[[OhmeApiClient], time] + + +TIME_DESCRIPTION = [ + OhmeTimeDescription( + key="target_time", + translation_key="target_time", + value_fn=lambda client: time( + hour=client.target_time[0], minute=client.target_time[1] + ), + set_fn=lambda client, value: client.async_set_target( + target_time=(value.hour, value.minute) + ), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up time entities.""" + coordinators = config_entry.runtime_data + coordinator = coordinators.charge_session_coordinator + + async_add_entities( + OhmeTime(coordinator, description) + for description in TIME_DESCRIPTION + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeTime(OhmeEntity, TimeEntity): + """Generic time entity for Ohme.""" + + entity_description: OhmeTimeDescription + + @property + def native_value(self) -> time: + """Return the current value of the time.""" + return self.entity_description.value_fn(self.coordinator.client) + + async def async_set_value(self, value: time) -> None: + """Set the time value.""" + try: + await self.entity_description.set_fn(self.coordinator.client, value) + except ApiException as e: + raise HomeAssistantError( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/ollama/manifest.json b/homeassistant/components/ollama/manifest.json index dbecbf87e4e..c3f7616ca16 100644 --- a/homeassistant/components/ollama/manifest.json +++ b/homeassistant/components/ollama/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/ollama", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["ollama==0.4.5"] + "requirements": ["ollama==0.4.7"] } diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py index 974b4082cae..228748d5257 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -15,6 +15,7 @@ from homeassistant.config_entries import ( ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import callback +from homeassistant.data_entry_flow import section from homeassistant.helpers.selector import ( NumberSelector, NumberSelectorConfig, @@ -49,9 +50,13 @@ INPUT_SOURCES_ALL_MEANINGS = [ input_source.value_meaning for input_source in InputSource ] STEP_MANUAL_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) -STEP_CONFIGURE_SCHEMA = vol.Schema( +STEP_RECONFIGURE_SCHEMA = vol.Schema( { vol.Required(OPTION_VOLUME_RESOLUTION): vol.In(VOLUME_RESOLUTION_ALLOWED), + } +) +STEP_CONFIGURE_SCHEMA = STEP_RECONFIGURE_SCHEMA.extend( + { vol.Required(OPTION_INPUT_SOURCES): SelectSelector( SelectSelectorConfig( options=INPUT_SOURCES_ALL_MEANINGS, @@ -216,55 +221,52 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the configuration of a single receiver.""" errors = {} - entry = None - entry_options = None + reconfigure_entry = None + schema = STEP_CONFIGURE_SCHEMA if self.source == SOURCE_RECONFIGURE: - 
entry = self._get_reconfigure_entry() - entry_options = entry.options + schema = STEP_RECONFIGURE_SCHEMA + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - source_meanings: list[str] = user_input[OPTION_INPUT_SOURCES] - if not source_meanings: + volume_resolution = user_input[OPTION_VOLUME_RESOLUTION] + + if reconfigure_entry is not None: + entry_options = reconfigure_entry.options + result = self.async_update_reload_and_abort( + reconfigure_entry, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: entry_options[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: entry_options[OPTION_INPUT_SOURCES], + }, + ) + + _LOGGER.debug("Reconfigured receiver, result: %s", result) + return result + + input_source_meanings: list[str] = user_input[OPTION_INPUT_SOURCES] + if not input_source_meanings: errors[OPTION_INPUT_SOURCES] = "empty_input_source_list" else: - sources_store: dict[str, str] = {} - for source_meaning in source_meanings: - source = InputSource.from_meaning(source_meaning) + input_sources_store: dict[str, str] = {} + for input_source_meaning in input_source_meanings: + input_source = InputSource.from_meaning(input_source_meaning) + input_sources_store[input_source.value] = input_source_meaning - source_name = source_meaning - if entry_options is not None: - source_name = entry_options[OPTION_INPUT_SOURCES].get( - source.value, source_name - ) - sources_store[source.value] = source_name - - volume_resolution = user_input[OPTION_VOLUME_RESOLUTION] - - if entry_options is None: - result = self.async_create_entry( - title=self._receiver_info.model_name, - data={ - CONF_HOST: self._receiver_info.host, - }, - options={ - OPTION_VOLUME_RESOLUTION: volume_resolution, - OPTION_MAX_VOLUME: OPTION_MAX_VOLUME_DEFAULT, - OPTION_INPUT_SOURCES: sources_store, - }, - ) - else: - assert entry is not None - result = self.async_update_reload_and_abort( - entry, - data={ - CONF_HOST: self._receiver_info.host, - }, - options={ - OPTION_VOLUME_RESOLUTION: volume_resolution, - OPTION_MAX_VOLUME: entry_options[OPTION_MAX_VOLUME], - OPTION_INPUT_SOURCES: sources_store, - }, - ) + result = self.async_create_entry( + title=self._receiver_info.model_name, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: OPTION_MAX_VOLUME_DEFAULT, + OPTION_INPUT_SOURCES: input_sources_store, + }, + ) _LOGGER.debug("Configured receiver, result: %s", result) return result @@ -273,12 +275,13 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): suggested_values = user_input if suggested_values is None: - if entry_options is None: + if reconfigure_entry is None: suggested_values = { OPTION_VOLUME_RESOLUTION: OPTION_VOLUME_RESOLUTION_DEFAULT, OPTION_INPUT_SOURCES: [], } else: + entry_options = reconfigure_entry.options suggested_values = { OPTION_VOLUME_RESOLUTION: entry_options[OPTION_VOLUME_RESOLUTION], OPTION_INPUT_SOURCES: [ @@ -289,9 +292,7 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="configure_receiver", - data_schema=self.add_suggested_values_to_schema( - STEP_CONFIGURE_SCHEMA, suggested_values - ), + data_schema=self.add_suggested_values_to_schema(schema, suggested_values), errors=errors, description_placeholders={ "name": f"{self._receiver_info.model_name} ({self._receiver_info.host})" @@ -360,57 +361,107 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def 
async_get_options_flow( - config_entry: ConfigEntry, - ) -> OptionsFlow: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Return the options flow.""" - return OnkyoOptionsFlowHandler(config_entry) + return OnkyoOptionsFlowHandler() + + +OPTIONS_STEP_INIT_SCHEMA = vol.Schema( + { + vol.Required(OPTION_MAX_VOLUME): NumberSelector( + NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) + ), + vol.Required(OPTION_INPUT_SOURCES): SelectSelector( + SelectSelectorConfig( + options=INPUT_SOURCES_ALL_MEANINGS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + } +) class OnkyoOptionsFlowHandler(OptionsFlow): """Handle an options flow for Onkyo.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - sources_store: dict[str, str] = config_entry.options[OPTION_INPUT_SOURCES] - self._input_sources = {InputSource(k): v for k, v in sources_store.items()} + _data: dict[str, Any] + _input_sources: dict[InputSource, str] async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage the options.""" + errors = {} + + entry_options = self.config_entry.options + if user_input is not None: - sources_store: dict[str, str] = {} - for source_meaning, source_name in user_input.items(): - if source_meaning in INPUT_SOURCES_ALL_MEANINGS: - source = InputSource.from_meaning(source_meaning) - sources_store[source.value] = source_name + self._input_sources = {} + for input_source_meaning in user_input[OPTION_INPUT_SOURCES]: + input_source = InputSource.from_meaning(input_source_meaning) + input_source_name = entry_options[OPTION_INPUT_SOURCES].get( + input_source.value, input_source_meaning + ) + self._input_sources[input_source] = input_source_name + + if not self._input_sources: + errors[OPTION_INPUT_SOURCES] = "empty_input_source_list" + else: + self._data = { + OPTION_VOLUME_RESOLUTION: entry_options[OPTION_VOLUME_RESOLUTION], + OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], + } + + return await self.async_step_names() + + suggested_values = user_input + if suggested_values is None: + suggested_values = { + OPTION_MAX_VOLUME: entry_options[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: [ + InputSource(input_source).value_meaning + for input_source in entry_options[OPTION_INPUT_SOURCES] + ], + } + + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + OPTIONS_STEP_INIT_SCHEMA, suggested_values + ), + errors=errors, + ) + + async def async_step_names( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure names.""" + if user_input is not None: + input_sources_store: dict[str, str] = {} + for input_source_meaning, input_source_name in user_input[ + "input_sources" + ].items(): + input_source = InputSource.from_meaning(input_source_meaning) + input_sources_store[input_source.value] = input_source_name return self.async_create_entry( data={ - OPTION_VOLUME_RESOLUTION: self.config_entry.options[ - OPTION_VOLUME_RESOLUTION - ], - OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], - OPTION_INPUT_SOURCES: sources_store, + **self._data, + OPTION_INPUT_SOURCES: input_sources_store, } ) schema_dict: dict[Any, Selector] = {} - max_volume: float = self.config_entry.options[OPTION_MAX_VOLUME] - schema_dict[vol.Required(OPTION_MAX_VOLUME, default=max_volume)] = ( - NumberSelector( - NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) - ) - ) - - for source, source_name in self._input_sources.items(): - 
schema_dict[vol.Required(source.value_meaning, default=source_name)] = ( - TextSelector() - ) + for input_source, input_source_name in self._input_sources.items(): + schema_dict[ + vol.Required(input_source.value_meaning, default=input_source_name) + ] = TextSelector() return self.async_show_form( - step_id="init", - data_schema=vol.Schema(schema_dict), + step_id="names", + data_schema=vol.Schema( + {vol.Required("input_sources"): section(vol.Schema(schema_dict))} + ), ) diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json index 849171c7161..b3b14efec44 100644 --- a/homeassistant/components/onkyo/strings.json +++ b/homeassistant/components/onkyo/strings.json @@ -27,17 +27,17 @@ "description": "Configure {name}", "data": { "volume_resolution": "Volume resolution", - "input_sources": "Input sources" + "input_sources": "[%key:component::onkyo::options::step::init::data::input_sources%]" }, "data_description": { "volume_resolution": "Number of steps it takes for the receiver to go from the lowest to the highest possible volume.", - "input_sources": "List of input sources supported by the receiver." + "input_sources": "[%key:component::onkyo::options::step::init::data_description::input_sources%]" } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "empty_input_source_list": "Input source list cannot be empty", + "empty_input_source_list": "[%key:component::onkyo::options::error::empty_input_source_list%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { @@ -52,12 +52,25 @@ "step": { "init": { "data": { - "max_volume": "Maximum volume limit (%)" + "max_volume": "Maximum volume limit (%)", + "input_sources": "Input sources" }, "data_description": { - "max_volume": "Maximum volume limit as a percentage. This will associate Home Assistant's maximum volume to this value on the receiver, i.e., if you set this to 50%, then setting the volume to 100% in Home Assistant will cause the volume on the receiver to be set to 50% of its maximum value." + "max_volume": "Maximum volume limit as a percentage. This will associate Home Assistant's maximum volume to this value on the receiver, i.e., if you set this to 50%, then setting the volume to 100% in Home Assistant will cause the volume on the receiver to be set to 50% of its maximum value.", + "input_sources": "List of input sources supported by the receiver." + } + }, + "names": { + "sections": { + "input_sources": { + "name": "Input source names", + "description": "Mappings of the receiver's input sources to their names." 
+ } } } + }, + "error": { + "empty_input_source_list": "Input source list cannot be empty" } }, "issues": { diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index fc5de57508b..f645444f9c6 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -11,6 +11,7 @@ from urllib.parse import urlparse from onvif.util import is_auth_error, stringify_onvif_error import voluptuous as vol from wsdiscovery.discovery import ThreadedWSDiscovery as WSDiscovery +from wsdiscovery.qname import QName from wsdiscovery.scope import Scope from wsdiscovery.service import Service from zeep.exceptions import Fault @@ -58,16 +59,22 @@ CONF_MANUAL_INPUT = "Manually configure ONVIF device" def wsdiscovery() -> list[Service]: """Get ONVIF Profile S devices from network.""" - discovery = WSDiscovery(ttl=4) + discovery = WSDiscovery(ttl=4, relates_to=True) try: discovery.start() return discovery.searchServices( - scopes=[Scope("onvif://www.onvif.org/Profile/Streaming")] + types=[ + QName( + "http://www.onvif.org/ver10/network/wsdl", + "NetworkVideoTransmitter", + "dp0", + ) + ], + scopes=[Scope("onvif://www.onvif.org/Profile/Streaming")], + timeout=10, ) finally: discovery.stop() - # Stop the threads started by WSDiscovery since otherwise there is a leak. - discovery._stopThreads() # noqa: SLF001 async def async_discovery(hass: HomeAssistant) -> list[dict[str, Any]]: diff --git a/homeassistant/components/onvif/manifest.json b/homeassistant/components/onvif/manifest.json index 9d27314593c..78df5130aed 100644 --- a/homeassistant/components/onvif/manifest.json +++ b/homeassistant/components/onvif/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/onvif", "iot_class": "local_push", "loggers": ["onvif", "wsdiscovery", "zeep"], - "requirements": ["onvif-zeep-async==3.2.3", "WSDiscovery==2.0.0"] + "requirements": ["onvif-zeep-async==3.2.5", "WSDiscovery==2.1.2"] } diff --git a/homeassistant/components/onvif/parsers.py b/homeassistant/components/onvif/parsers.py index 9904a4bbfa9..6eb1d001796 100644 --- a/homeassistant/components/onvif/parsers.py +++ b/homeassistant/components/onvif/parsers.py @@ -381,6 +381,9 @@ _TAPO_EVENT_TEMPLATES: dict[str, Event] = { "IsPeople": Event( uid="", name="Person Detection", platform="binary_sensor", device_class="motion" ), + "IsPet": Event( + uid="", name="Pet Detection", platform="binary_sensor", device_class="motion" + ), "IsLineCross": Event( uid="", name="Line Detector Crossed", diff --git a/homeassistant/components/openai_conversation/manifest.json b/homeassistant/components/openai_conversation/manifest.json index fcbdc996ce5..9b70246117c 100644 --- a/homeassistant/components/openai_conversation/manifest.json +++ b/homeassistant/components/openai_conversation/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/openai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["openai==1.35.7"] + "requirements": ["openai==1.59.9"] } diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index b8f95c021fa..ca23265048f 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -2,15 +2,15 @@ "config": { "step": { "user": { - "title": "OSO Energy Auth", - "description": "Enter the generated 'Subscription Key' for your account at 
'https://portal.osoenergy.no/'", + "title": "OSO Energy auth", + "description": "Enter the 'Subscription key' for your account generated at 'https://portal.osoenergy.no/'.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } }, "reauth": { - "title": "OSO Energy Auth", - "description": "Generate and enter a new 'Subscription Key' for your account at 'https://portal.osoenergy.no/'.", + "title": "OSO Energy auth", + "description": "Enter a new 'Subscription key' for your account generated at 'https://portal.osoenergy.no/'.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } @@ -95,11 +95,11 @@ "services": { "get_profile": { "name": "Get heater profile", - "description": "Get the temperature profile of water heater" + "description": "Gets the temperature profile for the water heater" }, "set_profile": { "name": "Set heater profile", - "description": "Set the temperature profile of water heater", + "description": "Sets the temperature profile for the water heater", "fields": { "hour_00": { "name": "00:00", @@ -201,7 +201,7 @@ }, "set_v40_min": { "name": "Set v40 min", - "description": "Set the minimum quantity of water at 40°C for a heater", + "description": "Sets the minimum quantity of water at 40°C for a heater", "fields": { "v40_min": { "name": "V40 Min", @@ -211,7 +211,7 @@ }, "turn_off": { "name": "Turn off heating", - "description": "Turn off heating for one hour or until min temperature is reached", + "description": "Turns off heating for one hour or until min temperature is reached", "fields": { "until_temp_limit": { "name": "Until temperature limit", @@ -221,7 +221,7 @@ }, "turn_on": { "name": "Turn on heating", - "description": "Turn on heating for one hour or until max temperature is reached", + "description": "Turns on heating for one hour or until max temperature is reached", "fields": { "until_temp_limit": { "name": "Until temperature limit", diff --git a/homeassistant/components/overseerr/__init__.py b/homeassistant/components/overseerr/__init__.py index e4ac712e053..597d44f66cf 100644 --- a/homeassistant/components/overseerr/__init__.py +++ b/homeassistant/components/overseerr/__init__.py @@ -116,15 +116,13 @@ class OverseerrWebhookManager: allowed_methods=[METH_POST], ) if not await self.check_need_change(): + self.entry.runtime_data.push = True return for url in self.webhook_urls: if await self.test_and_set_webhook(url): return LOGGER.info("Failed to register Overseerr webhook") - if ( - cloud.async_active_subscription(self.hass) - and CONF_CLOUDHOOK_URL not in self.entry.data - ): + if cloud.async_active_subscription(self.hass): LOGGER.info("Trying to register a cloudhook URL") url = await _async_cloudhook_generate_url(self.hass, self.entry) if await self.test_and_set_webhook(url): @@ -151,6 +149,7 @@ class OverseerrWebhookManager: webhook_url=url, json_payload=JSON_PAYLOAD, ) + self.entry.runtime_data.push = True return True return False diff --git a/homeassistant/components/overseerr/config_flow.py b/homeassistant/components/overseerr/config_flow.py index 2ad0c8d6d61..9a8bdd1676f 100644 --- a/homeassistant/components/overseerr/config_flow.py +++ b/homeassistant/components/overseerr/config_flow.py @@ -1,14 +1,18 @@ """Config flow for Overseerr.""" +from collections.abc import Mapping from typing import Any -from python_overseerr import OverseerrClient -from python_overseerr.exceptions import OverseerrError +from python_overseerr import ( + OverseerrAuthenticationError, + OverseerrClient, + OverseerrError, +) import voluptuous as vol from yarl 
import URL from homeassistant.components.webhook import async_generate_id -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_API_KEY, CONF_HOST, @@ -25,6 +29,25 @@ from .const import DOMAIN class OverseerrConfigFlow(ConfigFlow, domain=DOMAIN): """Overseerr config flow.""" + async def _check_connection( + self, host: str, port: int, ssl: bool, api_key: str + ) -> str | None: + """Check if we can connect to the Overseerr instance.""" + client = OverseerrClient( + host, + port, + api_key, + ssl=ssl, + session=async_get_clientsession(self.hass), + ) + try: + await client.get_request_count() + except OverseerrAuthenticationError: + return "invalid_auth" + except OverseerrError: + return "cannot_connect" + return None + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -38,26 +61,32 @@ class OverseerrConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({CONF_HOST: host}) port = url.port assert port - client = OverseerrClient( - host, - port, - user_input[CONF_API_KEY], - ssl=url.scheme == "https", - session=async_get_clientsession(self.hass), + error = await self._check_connection( + host, port, url.scheme == "https", user_input[CONF_API_KEY] ) - try: - await client.get_request_count() - except OverseerrError: - errors["base"] = "cannot_connect" + if error: + errors["base"] = error else: - return self.async_create_entry( - title="Overseerr", + if self.source == SOURCE_USER: + return self.async_create_entry( + title="Overseerr", + data={ + CONF_HOST: host, + CONF_PORT: port, + CONF_SSL: url.scheme == "https", + CONF_API_KEY: user_input[CONF_API_KEY], + CONF_WEBHOOK_ID: async_generate_id(), + }, + ) + reconfigure_entry = self._get_reconfigure_entry() + return self.async_update_reload_and_abort( + reconfigure_entry, data={ + **reconfigure_entry.data, CONF_HOST: host, CONF_PORT: port, CONF_SSL: url.scheme == "https", CONF_API_KEY: user_input[CONF_API_KEY], - CONF_WEBHOOK_ID: async_generate_id(), }, ) return self.async_show_form( @@ -67,3 +96,41 @@ class OverseerrConfigFlow(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-auth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-auth confirmation.""" + errors: dict[str, str] = {} + if user_input: + entry = self._get_reauth_entry() + error = await self._check_connection( + entry.data[CONF_HOST], + entry.data[CONF_PORT], + entry.data[CONF_SSL], + user_input[CONF_API_KEY], + ) + if error: + errors["base"] = error + else: + return self.async_update_reload_and_abort( + entry, + data={**entry.data, CONF_API_KEY: user_input[CONF_API_KEY]}, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reconfiguration.""" + return await self.async_step_user() diff --git a/homeassistant/components/overseerr/coordinator.py b/homeassistant/components/overseerr/coordinator.py index c8512d764f4..2149dcbec7c 100644 --- a/homeassistant/components/overseerr/coordinator.py +++ b/homeassistant/components/overseerr/coordinator.py @@ 
-2,13 +2,18 @@ from datetime import timedelta -from python_overseerr import OverseerrClient, RequestCount -from python_overseerr.exceptions import OverseerrConnectionError +from python_overseerr import ( + OverseerrAuthenticationError, + OverseerrClient, + OverseerrConnectionError, + RequestCount, +) from yarl import URL from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONF_SSL from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -42,11 +47,17 @@ class OverseerrCoordinator(DataUpdateCoordinator[RequestCount]): session=async_get_clientsession(hass), ) self.url = URL.build(host=host, port=port, scheme="https" if ssl else "http") + self.push = False async def _async_update_data(self) -> RequestCount: """Fetch data from API endpoint.""" try: return await self.client.get_request_count() + except OverseerrAuthenticationError as err: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + ) from err except OverseerrConnectionError as err: raise UpdateFailed( translation_domain=DOMAIN, diff --git a/homeassistant/components/overseerr/event.py b/homeassistant/components/overseerr/event.py index b1b2efd6ec5..589a80c5404 100644 --- a/homeassistant/components/overseerr/event.py +++ b/homeassistant/components/overseerr/event.py @@ -4,11 +4,13 @@ from dataclasses import dataclass from typing import Any from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EVENT_KEY +from . 
import DOMAIN, EVENT_KEY from .coordinator import OverseerrConfigEntry, OverseerrCoordinator from .entity import OverseerrEntity @@ -47,14 +49,23 @@ async def async_setup_entry( """Set up Overseerr sensor entities based on a config entry.""" coordinator = entry.runtime_data - async_add_entities( - OverseerrEvent(coordinator, description) for description in EVENTS + ent_reg = er.async_get(hass) + + event_entities_setup_before = ent_reg.async_get_entity_id( + Platform.EVENT, DOMAIN, f"{entry.entry_id}-media" ) + if coordinator.push or event_entities_setup_before: + async_add_entities( + OverseerrEvent(coordinator, description) for description in EVENTS + ) + class OverseerrEvent(OverseerrEntity, EventEntity): """Defines a Overseerr event entity.""" + entity_description: OverseerrEventEntityDescription + def __init__( self, coordinator: OverseerrCoordinator, @@ -76,7 +87,11 @@ class OverseerrEvent(OverseerrEntity, EventEntity): """Handle incoming event.""" event_type = event["notification_type"].lower() if event_type.split("_")[0] == self.entity_description.key: - self._trigger_event(event_type[6:], event) + self._attr_entity_picture = event.get("image") + self._trigger_event( + event_type[6:], + parse_event(event, self.entity_description.nullable_fields), + ) self.async_write_ha_state() @callback @@ -88,12 +103,23 @@ class OverseerrEvent(OverseerrEntity, EventEntity): @property def available(self) -> bool: """Return True if entity is available.""" - return self._attr_available + return self._attr_available and self.coordinator.push def parse_event(event: dict[str, Any], nullable_fields: list[str]) -> dict[str, Any]: """Parse event.""" event.pop("notification_type") + event.pop("image") for field in nullable_fields: event.pop(field) + if (media := event.get("media")) is not None: + for field in ("status", "status4k"): + media[field] = media[field].lower() + for field in ("tmdb_id", "tvdb_id"): + if (value := media.get(field)) != "": + media[field] = int(value) + else: + media[field] = None + if (request := event.get("request")) is not None: + request["request_id"] = int(request["request_id"]) return event diff --git a/homeassistant/components/overseerr/manifest.json b/homeassistant/components/overseerr/manifest.json index 46ac97073d6..396b9d7000b 100644 --- a/homeassistant/components/overseerr/manifest.json +++ b/homeassistant/components/overseerr/manifest.json @@ -8,6 +8,6 @@ "documentation": "https://www.home-assistant.io/integrations/overseerr", "integration_type": "service", "iot_class": "local_push", - "quality_scale": "bronze", + "quality_scale": "platinum", "requirements": ["python-overseerr==0.6.0"] } diff --git a/homeassistant/components/overseerr/quality_scale.yaml b/homeassistant/components/overseerr/quality_scale.yaml index dfb794476aa..7afbcd6aa07 100644 --- a/homeassistant/components/overseerr/quality_scale.yaml +++ b/homeassistant/components/overseerr/quality_scale.yaml @@ -37,8 +37,8 @@ rules: status: done comment: Handled by the coordinator parallel-updates: done - reauthentication-flow: todo - test-coverage: todo + reauthentication-flow: done + test-coverage: done # Gold devices: done diagnostics: done @@ -50,24 +50,30 @@ rules: status: exempt comment: | This integration does not support discovery. 
- docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-troubleshooting: todo - docs-use-cases: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: | This integration has a fixed single device. - entity-category: todo - entity-device-class: todo - entity-disabled-by-default: todo + entity-category: done + entity-device-class: + status: exempt + comment: | + This integration has no relevant device class to use. + entity-disabled-by-default: + status: exempt + comment: | + This integration has no unpopular entities to disable. entity-translations: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/overseerr/strings.json b/homeassistant/components/overseerr/strings.json index 968b8c5b533..5053bcedc41 100644 --- a/homeassistant/components/overseerr/strings.json +++ b/homeassistant/components/overseerr/strings.json @@ -10,13 +10,24 @@ "url": "The URL of the Overseerr instance.", "api_key": "The API key of the Overseerr instance." } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "api_key": "[%key:component::overseerr::config::step::user::data_description::api_key%]" + } } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_host": "The provided URL is not a valid host." } }, @@ -66,6 +77,9 @@ "connection_error": { "message": "Error connecting to the Overseerr instance: {error}" }, + "auth_error": { + "message": "Invalid API key." + }, "not_loaded": { "message": "{target} is not loaded." 
}, diff --git a/homeassistant/components/peblar/const.py b/homeassistant/components/peblar/const.py index d7d7c2fa5b5..58fcc9b85da 100644 --- a/homeassistant/components/peblar/const.py +++ b/homeassistant/components/peblar/const.py @@ -23,7 +23,7 @@ PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT = { ChargeLimiter.INSTALLATION_LIMIT: "installation_limit", ChargeLimiter.LOCAL_MODBUS_API: "local_modbus_api", ChargeLimiter.LOCAL_REST_API: "local_rest_api", - ChargeLimiter.LOCAL_SCHEDULED: "local_scheduled", + ChargeLimiter.LOCAL_SCHEDULED_CHARGING: "local_scheduled_charging", ChargeLimiter.OCPP_SMART_CHARGING: "ocpp_smart_charging", ChargeLimiter.OVERCURRENT_PROTECTION: "overcurrent_protection", ChargeLimiter.PHASE_IMBALANCE: "phase_imbalance", diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 859682d3f1d..e2ae96de988 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["peblar==0.3.3"], + "requirements": ["peblar==0.4.0"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index fffa2b08d85..a33667fa533 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -96,6 +96,7 @@ "installation_limit": "Installation limit", "local_modbus_api": "Modbus API", "local_rest_api": "REST API", + "local_scheduled_charging": "Scheduled charging", "ocpp_smart_charging": "OCPP smart charging", "overcurrent_protection": "Overcurrent protection", "phase_imbalance": "Phase imbalance", diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index 9e132da63bc..58c2fbdc899 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -37,14 +37,14 @@ DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] 
= ( key="firmware", device_class=UpdateDeviceClass.FIRMWARE, installed_fn=lambda x: x.current.firmware, - has_fn=lambda x: x.current.firmware is not None, + has_fn=lambda x: x.available.firmware is not None, available_fn=lambda x: x.available.firmware, ), PeblarUpdateEntityDescription( key="customization", translation_key="customization", available_fn=lambda x: x.available.customization, - has_fn=lambda x: x.current.customization is not None, + has_fn=lambda x: x.available.customization is not None, installed_fn=lambda x: x.current.customization, ), ) diff --git a/homeassistant/components/ping/helpers.py b/homeassistant/components/ping/helpers.py index 82ebf4532da..996faa99c5b 100644 --- a/homeassistant/components/ping/helpers.py +++ b/homeassistant/components/ping/helpers.py @@ -160,7 +160,7 @@ class PingDataSubProcess(PingData): ) if pinger: - with suppress(TypeError): + with suppress(TypeError, ProcessLookupError): await pinger.kill() # type: ignore[func-returns-value] del pinger diff --git a/homeassistant/components/powerfox/manifest.json b/homeassistant/components/powerfox/manifest.json index bb72d73b5a8..3938eb01a1b 100644 --- a/homeassistant/components/powerfox/manifest.json +++ b/homeassistant/components/powerfox/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/powerfox", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["powerfox==1.2.0"], + "requirements": ["powerfox==1.2.1"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index 6a2522ac43b..d84452c0443 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -14,6 +14,7 @@ from tesla_powerwall import ( Powerwall, PowerwallUnreachableError, ) +from yarl import URL from homeassistant.components import persistent_notification from homeassistant.config_entries import ConfigEntry @@ -25,7 +26,14 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util.network import is_ip_address -from .const import DOMAIN, POWERWALL_API_CHANGED, POWERWALL_COORDINATOR, UPDATE_INTERVAL +from .const import ( + AUTH_COOKIE_KEY, + CONFIG_ENTRY_COOKIE, + DOMAIN, + POWERWALL_API_CHANGED, + POWERWALL_COORDINATOR, + UPDATE_INTERVAL, +) from .models import ( PowerwallBaseInfo, PowerwallConfigEntry, @@ -52,6 +60,8 @@ class PowerwallDataManager: self, hass: HomeAssistant, power_wall: Powerwall, + cookie_jar: CookieJar, + entry: PowerwallConfigEntry, ip_address: str, password: str | None, runtime_data: PowerwallRuntimeData, @@ -62,6 +72,8 @@ class PowerwallDataManager: self.password = password self.runtime_data = runtime_data self.power_wall = power_wall + self.cookie_jar = cookie_jar + self.entry = entry @property def api_changed(self) -> int: @@ -72,7 +84,9 @@ class PowerwallDataManager: """Recreate the login on auth failure.""" if self.power_wall.is_authenticated(): await self.power_wall.logout() + # Always use the password when recreating the login await self.power_wall.login(self.password or "") + self.save_auth_cookie() async def async_update_data(self) -> PowerwallData: """Fetch data from API endpoint.""" @@ -116,41 +130,74 @@ class PowerwallDataManager: return data raise RuntimeError("unreachable") + @callback + def save_auth_cookie(self) -> None: + """Save the auth cookie.""" + for cookie in 
self.cookie_jar: + if cookie.key == AUTH_COOKIE_KEY: + self.hass.config_entries.async_update_entry( + self.entry, + data={**self.entry.data, CONFIG_ENTRY_COOKIE: cookie.value}, + ) + _LOGGER.debug("Saved auth cookie") + break + async def async_setup_entry(hass: HomeAssistant, entry: PowerwallConfigEntry) -> bool: """Set up Tesla Powerwall from a config entry.""" ip_address: str = entry.data[CONF_IP_ADDRESS] password: str | None = entry.data.get(CONF_PASSWORD) + + cookie_jar: CookieJar = CookieJar(unsafe=True) + use_auth_cookie: bool = False + # Try to reuse the auth cookie + auth_cookie_value: str | None = entry.data.get(CONFIG_ENTRY_COOKIE) + if auth_cookie_value: + cookie_jar.update_cookies( + {AUTH_COOKIE_KEY: auth_cookie_value}, + URL(f"http://{ip_address}"), + ) + _LOGGER.debug("Using existing auth cookie") + use_auth_cookie = True + http_session = async_create_clientsession( - hass, verify_ssl=False, cookie_jar=CookieJar(unsafe=True) + hass, verify_ssl=False, cookie_jar=cookie_jar ) async with AsyncExitStack() as stack: power_wall = Powerwall(ip_address, http_session=http_session, verify_ssl=False) stack.push_async_callback(power_wall.close) - try: - base_info = await _login_and_fetch_base_info( - power_wall, ip_address, password - ) + for tries in range(2): + try: + base_info = await _login_and_fetch_base_info( + power_wall, ip_address, password, use_auth_cookie + ) - # Cancel closing power_wall on success - stack.pop_all() - except (TimeoutError, PowerwallUnreachableError) as err: - raise ConfigEntryNotReady from err - except MissingAttributeError as err: - # The error might include some important information about what exactly changed. - _LOGGER.error("The powerwall api has changed: %s", str(err)) - persistent_notification.async_create( - hass, API_CHANGED_ERROR_BODY, API_CHANGED_TITLE - ) - return False - except AccessDeniedError as err: - _LOGGER.debug("Authentication failed", exc_info=err) - raise ConfigEntryAuthFailed from err - except ApiError as err: - raise ConfigEntryNotReady from err + # Cancel closing power_wall on success + stack.pop_all() + break + except (TimeoutError, PowerwallUnreachableError) as err: + raise ConfigEntryNotReady from err + except MissingAttributeError as err: + # The error might include some important information about what exactly changed. 
+ _LOGGER.error("The powerwall api has changed: %s", str(err)) + persistent_notification.async_create( + hass, API_CHANGED_ERROR_BODY, API_CHANGED_TITLE + ) + return False + except AccessDeniedError as err: + if use_auth_cookie and tries == 0: + _LOGGER.debug( + "Authentication failed with cookie, retrying with password" + ) + use_auth_cookie = False + continue + _LOGGER.debug("Authentication failed", exc_info=err) + raise ConfigEntryAuthFailed from err + except ApiError as err: + raise ConfigEntryNotReady from err gateway_din = base_info.gateway_din if entry.unique_id is not None and is_ip_address(entry.unique_id): @@ -163,7 +210,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerwallConfigEntry) -> api_instance=power_wall, ) - manager = PowerwallDataManager(hass, power_wall, ip_address, password, runtime_data) + manager = PowerwallDataManager( + hass, + power_wall, + cookie_jar, + entry, + ip_address, + password, + runtime_data, + ) + manager.save_auth_cookie() coordinator = DataUpdateCoordinator( hass, @@ -213,10 +269,11 @@ async def async_migrate_entity_unique_ids( async def _login_and_fetch_base_info( - power_wall: Powerwall, host: str, password: str | None + power_wall: Powerwall, host: str, password: str | None, use_auth_cookie: bool ) -> PowerwallBaseInfo: """Login to the powerwall and fetch the base info.""" - if password is not None: + # Login step is skipped if password is None or if we are using the auth cookie + if not (password is None or use_auth_cookie): await power_wall.login(password) return await _call_base_info(power_wall, host) diff --git a/homeassistant/components/powerwall/config_flow.py b/homeassistant/components/powerwall/config_flow.py index 396ba31b4ee..b082016e562 100644 --- a/homeassistant/components/powerwall/config_flow.py +++ b/homeassistant/components/powerwall/config_flow.py @@ -31,7 +31,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from homeassistant.util.network import is_ip_address from . 
import async_last_update_was_successful -from .const import DOMAIN +from .const import CONFIG_ENTRY_COOKIE, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -257,8 +257,10 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): {CONF_IP_ADDRESS: reauth_entry.data[CONF_IP_ADDRESS], **user_input} ) if not errors: + # We have a new valid connection, old cookie is no longer valid + user_input[CONFIG_ENTRY_COOKIE] = None return self.async_update_reload_and_abort( - reauth_entry, data_updates=user_input + reauth_entry, data_updates={**user_input, CONFIG_ENTRY_COOKIE: None} ) self.context["title_placeholders"] = { diff --git a/homeassistant/components/powerwall/const.py b/homeassistant/components/powerwall/const.py index bb3a6c2355e..186a1221a87 100644 --- a/homeassistant/components/powerwall/const.py +++ b/homeassistant/components/powerwall/const.py @@ -18,3 +18,6 @@ ATTR_IS_ACTIVE = "is_active" MODEL = "PowerWall 2" MANUFACTURER = "Tesla" + +CONFIG_ENTRY_COOKIE = "cookie" +AUTH_COOKIE_KEY = "AuthCookie" diff --git a/homeassistant/components/renson/strings.json b/homeassistant/components/renson/strings.json index b756d16ea79..c81086502ad 100644 --- a/homeassistant/components/renson/strings.json +++ b/homeassistant/components/renson/strings.json @@ -186,46 +186,46 @@ "services": { "set_timer_level": { "name": "Set timer", - "description": "Set the ventilation timer", + "description": "Sets the ventilation timer", "fields": { "timer_level": { "name": "Level", - "description": "Level setting" + "description": "Ventilation level" }, "minutes": { "name": "Time", - "description": "Time of the timer (0 will disable the timer)" + "description": "Duration of the timer (0 will disable the timer)" } } }, "set_breeze": { - "name": "Set breeze", - "description": "Set the breeze function of the ventilation system", + "name": "Set Breeze", + "description": "Sets the Breeze function of the ventilation system", "fields": { "breeze_level": { "name": "[%key:component::renson::services::set_timer_level::fields::timer_level::name%]", - "description": "Ventilation level when breeze function is activated" + "description": "Ventilation level when Breeze function is activated" }, "temperature": { "name": "Temperature", - "description": "Temperature when the breeze function should be activated" + "description": "Temperature when the Breeze function should be activated" }, "activate": { "name": "Activate", - "description": "Activate or disable the breeze feature" + "description": "Activate or disable the Breeze feature" } } }, "set_pollution_settings": { "name": "Set pollution settings", - "description": "Set all the pollution settings of the ventilation system", + "description": "Sets all the pollution settings of the ventilation system", "fields": { "day_pollution_level": { - "name": "Day pollution Level", + "name": "Day pollution level", "description": "Ventilation level when pollution is detected in the day" }, "night_pollution_level": { - "name": "Night pollution Level", + "name": "Night pollution level", "description": "Ventilation level when pollution is detected in the night" }, "humidity_control": { @@ -242,11 +242,11 @@ }, "co2_threshold": { "name": "CO2 threshold", - "description": "Sets the CO2 pollution threshold level in ppm" + "description": "The CO2 pollution threshold level in ppm" }, "co2_hysteresis": { "name": "CO2 hysteresis", - "description": "Sets the CO2 pollution threshold hysteresis level in ppm" + "description": "The CO2 pollution threshold hysteresis level in ppm" } } } diff --git 
a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index bb6b668368b..83729fef3cd 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.11.6"] + "requirements": ["reolink-aio==0.11.8"] } diff --git a/homeassistant/components/ring/binary_sensor.py b/homeassistant/components/ring/binary_sensor.py index 85a916e95cd..2c458985498 100644 --- a/homeassistant/components/ring/binary_sensor.py +++ b/homeassistant/components/ring/binary_sensor.py @@ -30,6 +30,9 @@ from .entity import ( async_check_create_deprecated, ) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RingBinarySensorEntityDescription( diff --git a/homeassistant/components/ring/button.py b/homeassistant/components/ring/button.py index b9d5cceb373..30600237847 100644 --- a/homeassistant/components/ring/button.py +++ b/homeassistant/components/ring/button.py @@ -12,6 +12,10 @@ from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 + BUTTON_DESCRIPTION = ButtonEntityDescription( key="open_door", translation_key="open_door" ) diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index ccd91c163d6..c1a4e67ffd4 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -34,6 +34,10 @@ from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingDeviceT, RingEntity, exception_wrap +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 + FORCE_REFRESH_INTERVAL = timedelta(minutes=3) MOTION_DETECTION_CAPABILITY = "motion_detection" diff --git a/homeassistant/components/ring/event.py b/homeassistant/components/ring/event.py index 71a4bc8aea5..4d7a6277579 100644 --- a/homeassistant/components/ring/event.py +++ b/homeassistant/components/ring/event.py @@ -18,6 +18,9 @@ from . import RingConfigEntry from .coordinator import RingListenCoordinator from .entity import RingBaseEntity, RingDeviceT +# Event entity does not perform updates or actions. +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RingEventEntityDescription(EventEntityDescription, Generic[RingDeviceT]): diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index 9e29373a3aa..9ae0bac1004 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -18,6 +18,9 @@ from .entity import RingEntity, exception_wrap _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 # It takes a few seconds for the API to correctly return an update indicating # that the changes have been made. Once we request a change (i.e. a light diff --git a/homeassistant/components/ring/number.py b/homeassistant/components/ring/number.py index 91aabb6c800..b920ff7edc7 100644 --- a/homeassistant/components/ring/number.py +++ b/homeassistant/components/ring/number.py @@ -20,6 +20,10 @@ from . 
import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingDeviceT, RingEntity, refresh_after +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/ring/sensor.py b/homeassistant/components/ring/sensor.py index dee67882857..cf851a113bc 100644 --- a/homeassistant/components/ring/sensor.py +++ b/homeassistant/components/ring/sensor.py @@ -41,6 +41,9 @@ from .entity import ( async_check_create_deprecated, ) +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index b1452f7aeb5..05fa07c39eb 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -36,6 +36,10 @@ from .entity import ( _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class RingSirenEntityDescription( diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 0ac31fec209..e81d483adf3 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -27,6 +27,10 @@ from .entity import ( _LOGGER = logging.getLogger(__name__) +# Coordinator is used to centralize the data updates +# Actions restricted to 1 at a time +PARALLEL_UPDATES = 1 + IN_HOME_CHIME_IS_PRESENT = {v for k, v in DOORBELL_EXISTING_TYPE.items() if k != 2} diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 43bd92799a8..6a30efd64f8 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -39,7 +39,7 @@ "samsungctl[websocket]==0.7.1", "samsungtvws[async,encrypted]==2.7.2", "wakeonlan==2.1.0", - "async-upnp-client==0.42.0" + "async-upnp-client==0.43.0" ], "ssdp": [ { diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index 6b888590600..19e2d3083c9 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -47,6 +47,3 @@ SERVICE_ARCHIVE_PACKAGE = "archive_package" ATTR_PACKAGE_STATE = "package_state" ATTR_PACKAGE_TRACKING_NUMBER = "package_tracking_number" ATTR_CONFIG_ENTRY_ID = "config_entry_id" - - -DEPRECATED_KEY = "deprecated" diff --git a/homeassistant/components/seventeentrack/repairs.py b/homeassistant/components/seventeentrack/repairs.py deleted file mode 100644 index ce72960ea91..00000000000 --- a/homeassistant/components/seventeentrack/repairs.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Repairs for the SeventeenTrack integration.""" - -import voluptuous as vol - -from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResult - -from .const import DEPRECATED_KEY - - -class SensorDeprecationRepairFlow(RepairsFlow): - """Handler for an issue fixing flow.""" - - def __init__(self, entry: ConfigEntry) -> None: - """Create flow.""" - self.entry = entry - - async def async_step_init( - self, user_input: dict[str, str] | None = None - ) -> FlowResult: - 
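Editor's note — the Ring platform changes above add module-level `PARALLEL_UPDATES` constants with one-line comments. For readers unfamiliar with the convention, here is a minimal, hypothetical platform module (a sketch, not Ring's actual code) showing how the constant is typically used: `0` means no throttling because a shared coordinator already centralizes polling, while `1` serializes entity actions.

```python
# Hypothetical platform module illustrating the PARALLEL_UPDATES convention
# used in the Ring changes above; this is a sketch, not Ring's actual code.
from typing import Any

from homeassistant.components.switch import SwitchEntity

# 1 = entity actions (e.g. turn_on) run one at a time for this platform.
# 0 (as in binary_sensor.py / sensor.py above) = no limit, because the shared
# coordinator already centralizes data updates.
PARALLEL_UPDATES = 1


class ExampleSwitch(SwitchEntity):
    """Illustrative entity whose state is pushed by a coordinator."""

    _attr_should_poll = False

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Only one such action per platform executes concurrently."""
```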
"""Handle the first step of a fix flow.""" - return await self.async_step_confirm() - - async def async_step_confirm( - self, user_input: dict[str, str] | None = None - ) -> FlowResult: - """Handle the confirm step of a fix flow.""" - if user_input is not None: - data = {**self.entry.data, DEPRECATED_KEY: True} - self.hass.config_entries.async_update_entry(self.entry, data=data) - return self.async_create_entry(data={}) - - return self.async_show_form( - step_id="confirm", - data_schema=vol.Schema({}), - ) - - -async def async_create_fix_flow( - hass: HomeAssistant, issue_id: str, data: dict -) -> RepairsFlow: - """Create flow.""" - if issue_id.startswith("deprecate_sensor_") and ( - entry := hass.config_entries.async_get_entry(data["entry_id"]) - ): - return SensorDeprecationRepairFlow(entry) - return ConfirmRepairFlow() diff --git a/homeassistant/components/seventeentrack/sensor.py b/homeassistant/components/seventeentrack/sensor.py index 4e561a87961..dade9efb67c 100644 --- a/homeassistant/components/seventeentrack/sensor.py +++ b/homeassistant/components/seventeentrack/sensor.py @@ -4,12 +4,10 @@ from __future__ import annotations from typing import Any -from homeassistant.components import persistent_notification from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_LOCATION -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -17,23 +15,13 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import SeventeenTrackCoordinator from .const import ( - ATTR_DESTINATION_COUNTRY, ATTR_INFO_TEXT, - ATTR_ORIGIN_COUNTRY, - ATTR_PACKAGE_TYPE, ATTR_PACKAGES, ATTR_STATUS, ATTR_TIMESTAMP, - ATTR_TRACKING_INFO_LANGUAGE, ATTR_TRACKING_NUMBER, ATTRIBUTION, - DEPRECATED_KEY, DOMAIN, - LOGGER, - NOTIFICATION_DELIVERED_MESSAGE, - NOTIFICATION_DELIVERED_TITLE, - UNIQUE_ID_TEMPLATE, - VALUE_DELIVERED, ) @@ -45,63 +33,12 @@ async def async_setup_entry( """Set up a 17Track sensor entry.""" coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][config_entry.entry_id] - previous_tracking_numbers: set[str] = set() - - # This has been deprecated in 2024.8, will be removed in 2025.2 - @callback - def _async_create_remove_entities(): - if config_entry.data.get(DEPRECATED_KEY): - remove_packages(hass, coordinator.account_id, previous_tracking_numbers) - return - live_tracking_numbers = set(coordinator.data.live_packages.keys()) - - new_tracking_numbers = live_tracking_numbers - previous_tracking_numbers - old_tracking_numbers = previous_tracking_numbers - live_tracking_numbers - - previous_tracking_numbers.update(live_tracking_numbers) - - packages_to_add = [ - coordinator.data.live_packages[tracking_number] - for tracking_number in new_tracking_numbers - ] - - for package_data in coordinator.data.live_packages.values(): - if ( - package_data.status == VALUE_DELIVERED - and not coordinator.show_delivered - ): - old_tracking_numbers.add(package_data.tracking_number) - notify_delivered( - hass, - package_data.friendly_name, - package_data.tracking_number, - ) - - remove_packages(hass, coordinator.account_id, old_tracking_numbers) - - async_add_entities( - SeventeenTrackPackageSensor( - coordinator, - package_data.tracking_number, - ) - for package_data in packages_to_add - if not ( - not coordinator.show_delivered and package_data.status == "Delivered" - ) - ) async_add_entities( SeventeenTrackSummarySensor(status, coordinator) for status, summary_data in coordinator.data.summary.items() ) - if not config_entry.data.get(DEPRECATED_KEY): - deprecate_sensor_issue(hass, config_entry.entry_id) - _async_create_remove_entities() - config_entry.async_on_unload( - coordinator.async_add_listener(_async_create_remove_entities) - ) - class SeventeenTrackSensor(CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity): """Define a 17Track sensor.""" @@ -163,96 +100,3 @@ class SeventeenTrackSummarySensor(SeventeenTrackSensor): for package in packages ] } - - -# The dynamic package sensors have been replaced by the seventeentrack.get_packages service -class SeventeenTrackPackageSensor(SeventeenTrackSensor): - """Define an individual package sensor.""" - - _attr_translation_key = "package" - - def __init__( - self, - coordinator: SeventeenTrackCoordinator, - tracking_number: str, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - self._tracking_number = tracking_number - self._previous_status = coordinator.data.live_packages[tracking_number].status - self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( - coordinator.account_id, tracking_number - ) - package = coordinator.data.live_packages[tracking_number] - if not (name := package.friendly_name): - name = tracking_number - self._attr_translation_placeholders = {"name": name} - - @property - def available(self) -> bool: - """Return whether the entity is available.""" - return self._tracking_number in self.coordinator.data.live_packages - - @property - def native_value(self) -> StateType: - """Return the state.""" - return 
self.coordinator.data.live_packages[self._tracking_number].status - - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - package = self.coordinator.data.live_packages[self._tracking_number] - return { - ATTR_DESTINATION_COUNTRY: package.destination_country, - ATTR_INFO_TEXT: package.info_text, - ATTR_TIMESTAMP: package.timestamp, - ATTR_LOCATION: package.location, - ATTR_ORIGIN_COUNTRY: package.origin_country, - ATTR_PACKAGE_TYPE: package.package_type, - ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, - ATTR_TRACKING_NUMBER: package.tracking_number, - } - - -def remove_packages(hass: HomeAssistant, account_id: str, packages: set[str]) -> None: - """Remove entity itself.""" - reg = er.async_get(hass) - for package in packages: - entity_id = reg.async_get_entity_id( - "sensor", - "seventeentrack", - UNIQUE_ID_TEMPLATE.format(account_id, package), - ) - if entity_id: - reg.async_remove(entity_id) - - -def notify_delivered(hass: HomeAssistant, friendly_name: str, tracking_number: str): - """Notify when package is delivered.""" - LOGGER.debug("Package delivered: %s", tracking_number) - - identification = friendly_name if friendly_name else tracking_number - message = NOTIFICATION_DELIVERED_MESSAGE.format(identification, tracking_number) - title = NOTIFICATION_DELIVERED_TITLE.format(identification) - notification_id = NOTIFICATION_DELIVERED_TITLE.format(tracking_number) - - persistent_notification.create( - hass, message, title=title, notification_id=notification_id - ) - - -@callback -def deprecate_sensor_issue(hass: HomeAssistant, entry_id: str) -> None: - """Ensure an issue is registered.""" - ir.async_create_issue( - hass, - DOMAIN, - f"deprecate_sensor_{entry_id}", - breaks_in_ha_version="2025.2.0", - issue_domain=DOMAIN, - is_fixable=True, - is_persistent=True, - translation_key="deprecate_sensor", - severity=ir.IssueSeverity.WARNING, - data={"entry_id": entry_id}, - ) diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index bbd01ed3055..982b15ab629 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -37,19 +37,6 @@ } } }, - "issues": { - "deprecate_sensor": { - "title": "17Track package sensors are being deprecated", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::seventeentrack::issues::deprecate_sensor::title%]", - "description": "17Track package sensors are deprecated and will be removed.\nPlease update your automations and scripts to get data using the `seventeentrack.get_packages` action." 
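Editor's note — the removed repair issue above points users at the `seventeentrack.get_packages` action as the replacement for the deleted per-package sensors. As a rough illustration only: the field name `config_entry_id` comes from `ATTR_CONFIG_ENTRY_ID` in const.py above; the rest of the action schema is not shown in this diff, so treat the call below as an assumption rather than the integration's documented API.

```python
# Hedged sketch: calling the action that replaces the removed package sensors.
# "config_entry_id" comes from ATTR_CONFIG_ENTRY_ID in const.py above; any
# other fields of the real schema are not shown in this diff.
from homeassistant.core import HomeAssistant, ServiceResponse


async def fetch_packages(hass: HomeAssistant, entry_id: str) -> ServiceResponse:
    """Return package data from the 17Track integration via its action."""
    return await hass.services.async_call(
        "seventeentrack",
        "get_packages",
        {"config_entry_id": entry_id},
        blocking=True,
        return_response=True,
    )
```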
- } - } - } - } - }, "entity": { "sensor": { "not_found": { diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index 1adaad8f975..f81ba5ca7f7 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -237,6 +237,7 @@ OTA_SUCCESS = "ota_success" GEN1_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen1/#changelog" GEN2_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen2/changelog/" +GEN2_BETA_RELEASE_URL = f"{GEN2_RELEASE_URL}#unreleased" DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( MODEL_WALL_DISPLAY, MODEL_MOTION, diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index d450727ead6..81766c65388 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -50,6 +50,7 @@ from .const import ( DOMAIN, FIRMWARE_UNSUPPORTED_ISSUE_ID, GEN1_RELEASE_URL, + GEN2_BETA_RELEASE_URL, GEN2_RELEASE_URL, LOGGER, RPC_INPUTS_EVENTS_TYPES, @@ -453,9 +454,14 @@ def mac_address_from_name(name: str) -> str | None: def get_release_url(gen: int, model: str, beta: bool) -> str | None: """Return release URL or None.""" - if beta or model in DEVICES_WITHOUT_FIRMWARE_CHANGELOG: + if ( + beta and gen in BLOCK_GENERATIONS + ) or model in DEVICES_WITHOUT_FIRMWARE_CHANGELOG: return None + if beta: + return GEN2_BETA_RELEASE_URL + return GEN1_RELEASE_URL if gen in BLOCK_GENERATIONS else GEN2_RELEASE_URL diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 1a222f1b21f..dee81264fa4 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -6,6 +6,7 @@ from collections.abc import Mapping from typing import Any from pysmlight import Api2 +from pysmlight.const import Devices from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError import voluptuous as vol @@ -51,6 +52,11 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): self.client = Api2(self.host, session=async_get_clientsession(self.hass)) try: + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + if not await self._async_check_auth_required(user_input): return await self._async_complete_entry(user_input) except SmlightConnectionError: @@ -70,6 +76,11 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + if not await self._async_check_auth_required(user_input): return await self._async_complete_entry(user_input) except SmlightConnectionError: @@ -116,6 +127,11 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: user_input[CONF_HOST] = self.host try: + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + if not await self._async_check_auth_required(user_input): return await self._async_complete_entry(user_input) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 6518cc81989..3a8578c8a59 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - 
"requirements": ["pysmlight==0.1.5"], + "requirements": ["pysmlight==0.1.6"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index 1e6a533beef..21ff5098d27 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -38,7 +38,8 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "reauth_failed": "[%key:common::config_flow::error::invalid_auth%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unsupported_device": "This device is not yet supported by the SMLIGHT integration" } }, "entity": { diff --git a/homeassistant/components/solax/manifest.json b/homeassistant/components/solax/manifest.json index 925f11e4c65..5509901ae02 100644 --- a/homeassistant/components/solax/manifest.json +++ b/homeassistant/components/solax/manifest.json @@ -1,7 +1,7 @@ { "domain": "solax", "name": "SolaX Power", - "codeowners": ["@squishykid"], + "codeowners": ["@squishykid", "@Darsstar"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/solax", "iot_class": "local_polling", diff --git a/homeassistant/components/sonos/config_flow.py b/homeassistant/components/sonos/config_flow.py index 66fe0f0d78c..057cdb8ec08 100644 --- a/homeassistant/components/sonos/config_flow.py +++ b/homeassistant/components/sonos/config_flow.py @@ -1,7 +1,6 @@ """Config flow for SONOS.""" from collections.abc import Awaitable -import dataclasses from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlowResult @@ -32,15 +31,15 @@ class SonosDiscoveryFlowHandler(DiscoveryFlowHandler[Awaitable[bool]], domain=DO hostname = discovery_info.hostname if hostname is None or not hostname.lower().startswith("sonos"): return self.async_abort(reason="not_sonos_device") - await self.async_set_unique_id(self._domain, raise_on_progress=False) - host = discovery_info.host - mdns_name = discovery_info.name - properties = discovery_info.properties - boot_seqnum = properties.get("bootseq") - model = properties.get("model") - uid = hostname_to_uid(hostname) if discovery_manager := self.hass.data.get(DATA_SONOS_DISCOVERY_MANAGER): + host = discovery_info.host + mdns_name = discovery_info.name + properties = discovery_info.properties + boot_seqnum = properties.get("bootseq") + model = properties.get("model") + uid = hostname_to_uid(hostname) discovery_manager.async_discovered_player( "Zeroconf", properties, host, uid, boot_seqnum, model, mdns_name ) - return await self.async_step_discovery(dataclasses.asdict(discovery_info)) + await self.async_set_unique_id(self._domain, raise_on_progress=False) + return await self.async_step_discovery({}) diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py index 099b1cb3ca8..a86544d883e 100644 --- a/homeassistant/components/spotify/coordinator.py +++ b/homeassistant/components/spotify/coordinator.py @@ -31,6 +31,9 @@ _LOGGER = logging.getLogger(__name__) type SpotifyConfigEntry = ConfigEntry[SpotifyData] +UPDATE_INTERVAL = timedelta(seconds=30) + + @dataclass class SpotifyCoordinatorData: """Class to hold Spotify data.""" @@ -59,7 +62,7 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): hass, 
_LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=30), + update_interval=UPDATE_INTERVAL, ) self.client = client self._playlist: Playlist | None = None @@ -73,6 +76,7 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): raise UpdateFailed("Error communicating with Spotify API") from err async def _async_update_data(self) -> SpotifyCoordinatorData: + self.update_interval = UPDATE_INTERVAL try: current = await self.client.get_playback() except SpotifyConnectionError as err: @@ -120,6 +124,13 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): ) self._playlist = None self._checked_playlist_id = None + if current.is_playing and current.progress_ms is not None: + assert current.item is not None + time_left = timedelta( + milliseconds=current.item.duration_ms - current.progress_ms + ) + if time_left < UPDATE_INTERVAL: + self.update_interval = time_left + timedelta(seconds=1) return SpotifyCoordinatorData( current_playback=current, position_updated_at=position_updated_at, diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index 2632e37aa98..6e1fba8c3a3 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.42.0"] + "requirements": ["async-upnp-client==0.43.0"] } diff --git a/homeassistant/components/switchbot/quality_scale.yaml b/homeassistant/components/switchbot/quality_scale.yaml new file mode 100644 index 00000000000..3b8976aeb8e --- /dev/null +++ b/homeassistant/components/switchbot/quality_scale.yaml @@ -0,0 +1,96 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: todo + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: todo + parallel-updates: + status: todo + comment: | + set `PARALLEL_UPDATES` in lock.py + reauthentication-flow: todo + test-coverage: + status: todo + comment: | + Consider using snapshots for fixating all the entities a device creates. + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + No network discovery. + discovery: + status: done + comment: | + Can be improved: Device type scan filtering is applied to only show devices that are actually supported. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: done + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + Only one device per config entry. New devices are set up as new entries. 
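Editor's note — the Spotify coordinator change above makes polling adaptive: every refresh resets the interval to the 30-second default, then shortens it when the currently playing track will finish before the next scheduled poll. The same decision, condensed into a standalone sketch (the function name is hypothetical; the arithmetic mirrors the diff):

```python
from datetime import timedelta

UPDATE_INTERVAL = timedelta(seconds=30)


def next_poll_interval(
    is_playing: bool, progress_ms: int | None, duration_ms: int | None
) -> timedelta:
    """Return the delay until the next poll, as in the coordinator above.

    Default to the regular interval, but if the remaining playtime is
    shorter, poll one second after the track is expected to end so the
    next track is picked up promptly.
    """
    if not is_playing or progress_ms is None or duration_ms is None:
        return UPDATE_INTERVAL
    time_left = timedelta(milliseconds=duration_ms - progress_ms)
    if time_left < UPDATE_INTERVAL:
        return time_left + timedelta(seconds=1)
    return UPDATE_INTERVAL
```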
+ entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: + status: todo + comment: | + Needs to provide translations for hub2 temperature entity + exception-translations: todo + icon-translations: + status: exempt + comment: | + No custom icons. + reconfiguration-flow: + status: exempt + comment: | + No need for reconfiguration flow. + repair-issues: + status: exempt + comment: | + No repairs/issues. + stale-devices: + status: exempt + comment: | + Device type integration. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index 3619619782e..0b8b8731f8f 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -11,12 +11,15 @@ from synology_dsm.exceptions import SynologyDSMNotLoggedInException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_VERIFY_SSL -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from .common import SynoApi, raise_config_entry_auth_error from .const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, + DATA_BACKUP_AGENT_LISTENERS, DEFAULT_VERIFY_SSL, DOMAIN, EXCEPTION_DETAILS, @@ -60,6 +63,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry( entry, data={**entry.data, CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL} ) + if CONF_BACKUP_SHARE not in entry.options: + hass.config_entries.async_update_entry( + entry, + options={**entry.options, CONF_BACKUP_SHARE: None, CONF_BACKUP_PATH: None}, + ) # Continue setup api = SynoApi(hass, entry) @@ -118,6 +126,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) + if entry.options[CONF_BACKUP_SHARE]: + _async_notify_backup_listeners_soon(hass) + return True @@ -127,9 +138,20 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] await entry_data.api.async_unload() hass.data[DOMAIN].pop(entry.unique_id) + _async_notify_backup_listeners_soon(hass) return unload_ok +def _async_notify_backup_listeners(hass: HomeAssistant) -> None: + for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): + listener() + + +@callback +def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None: + hass.loop.call_soon(_async_notify_backup_listeners, hass) + + async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/synology_dsm/backup.py b/homeassistant/components/synology_dsm/backup.py new file mode 100644 index 00000000000..eed6af758ba --- /dev/null +++ b/homeassistant/components/synology_dsm/backup.py @@ -0,0 +1,223 @@ +"""Support for Synology DSM backup agents.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable, Coroutine +import logging +from typing import TYPE_CHECKING, Any + +from aiohttp import StreamReader +from synology_dsm.api.file_station 
import SynoFileStation +from synology_dsm.exceptions import SynologyDSMAPIErrorException + +from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator +from homeassistant.helpers.json import json_dumps +from homeassistant.util.json import JsonObjectType, json_loads_object + +from .const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, + DATA_BACKUP_AGENT_LISTENERS, + DOMAIN, +) +from .models import SynologyDSMData + +LOGGER = logging.getLogger(__name__) + + +async def async_get_backup_agents( + hass: HomeAssistant, +) -> list[BackupAgent]: + """Return a list of backup agents.""" + if not ( + entries := hass.config_entries.async_loaded_entries(DOMAIN) + ) or not hass.data.get(DOMAIN): + LOGGER.debug("No proper config entry found") + return [] + syno_datas: dict[str, SynologyDSMData] = hass.data[DOMAIN] + return [ + SynologyDSMBackupAgent(hass, entry) + for entry in entries + if entry.unique_id is not None + and (syno_data := syno_datas.get(entry.unique_id)) + and syno_data.api.file_station + and entry.options.get(CONF_BACKUP_PATH) + ] + + +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed. + + :return: A function to unregister the listener. + """ + hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener) + + @callback + def remove_listener() -> None: + """Remove the listener.""" + hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener) + if not hass.data[DATA_BACKUP_AGENT_LISTENERS]: + del hass.data[DATA_BACKUP_AGENT_LISTENERS] + + return remove_listener + + +class SynologyDSMBackupAgent(BackupAgent): + """Synology DSM backup agent.""" + + domain = DOMAIN + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize the Synology DSM backup agent.""" + super().__init__() + LOGGER.debug("Initializing Synology DSM backup agent for %s", entry.unique_id) + self.name = entry.title + self.path = ( + f"{entry.options[CONF_BACKUP_SHARE]}/{entry.options[CONF_BACKUP_PATH]}" + ) + syno_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + self.api = syno_data.api + + @property + def _file_station(self) -> SynoFileStation: + if TYPE_CHECKING: + # we ensure that file_station exist already in async_get_backup_agents + assert self.api.file_station + return self.api.file_station + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + try: + resp = await self._file_station.download_file( + path=self.path, + filename=f"{backup_id}.tar", + ) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to download backup") from err + + if TYPE_CHECKING: + assert isinstance(resp, StreamReader) + + return ChunkAsyncStreamIterator(resp) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. 
+ :param backup: Metadata about the backup that should be uploaded. + """ + + # upload backup.tar file first + try: + await self._file_station.upload_file( + path=self.path, + filename=f"{backup.backup_id}.tar", + source=await open_stream(), + create_parents=True, + ) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to upload backup") from err + + # upload backup_meta.json file when backup.tar was successful uploaded + try: + await self._file_station.upload_file( + path=self.path, + filename=f"{backup.backup_id}_meta.json", + source=json_dumps(backup.as_dict()).encode(), + ) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to upload backup") from err + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + """ + try: + await self._file_station.delete_file( + path=self.path, filename=f"{backup_id}.tar" + ) + await self._file_station.delete_file( + path=self.path, filename=f"{backup_id}_meta.json" + ) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to delete the backup") from err + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return list((await self._async_list_backups(**kwargs)).values()) + + async def _async_list_backups(self, **kwargs: Any) -> dict[str, AgentBackup]: + """List backups.""" + + async def _download_meta_data(filename: str) -> JsonObjectType: + try: + resp = await self._file_station.download_file( + path=self.path, filename=filename + ) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to download meta data") from err + + if TYPE_CHECKING: + assert isinstance(resp, StreamReader) + + try: + return json_loads_object(await resp.read()) + except Exception as err: + raise BackupAgentError("Failed to read meta data") from err + + try: + files = await self._file_station.get_files(path=self.path) + except SynologyDSMAPIErrorException as err: + raise BackupAgentError("Failed to list backups") from err + + if TYPE_CHECKING: + assert files + + backups: dict[str, AgentBackup] = {} + for file in files: + if file.name.endswith("_meta.json"): + try: + meta_data = await _download_meta_data(file.name) + except BackupAgentError as err: + LOGGER.error("Failed to download meta data: %s", err) + continue + agent_backup = AgentBackup.from_dict(meta_data) + backups[agent_backup.backup_id] = agent_backup + return backups + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + backups = await self._async_list_backups() + return backups.get(backup_id) diff --git a/homeassistant/components/synology_dsm/common.py b/homeassistant/components/synology_dsm/common.py index 9a6284eff2b..dfc372e6bde 100644 --- a/homeassistant/components/synology_dsm/common.py +++ b/homeassistant/components/synology_dsm/common.py @@ -14,6 +14,7 @@ from synology_dsm.api.core.upgrade import SynoCoreUpgrade from synology_dsm.api.core.utilization import SynoCoreUtilization from synology_dsm.api.dsm.information import SynoDSMInformation from synology_dsm.api.dsm.network import SynoDSMNetwork +from synology_dsm.api.file_station import SynoFileStation from synology_dsm.api.photos import SynoPhotos from synology_dsm.api.storage.storage import SynoStorage from synology_dsm.api.surveillance_station import SynoSurveillanceStation @@ -62,11 +63,12 @@ 
class SynoApi: self.config_url = f"http://{entry.data[CONF_HOST]}:{entry.data[CONF_PORT]}" # DSM APIs + self.file_station: SynoFileStation | None = None self.information: SynoDSMInformation | None = None self.network: SynoDSMNetwork | None = None + self.photos: SynoPhotos | None = None self.security: SynoCoreSecurity | None = None self.storage: SynoStorage | None = None - self.photos: SynoPhotos | None = None self.surveillance_station: SynoSurveillanceStation | None = None self.system: SynoCoreSystem | None = None self.upgrade: SynoCoreUpgrade | None = None @@ -74,10 +76,11 @@ class SynoApi: # Should we fetch them self._fetching_entities: dict[str, set[str]] = {} + self._with_file_station = True self._with_information = True + self._with_photos = True self._with_security = True self._with_storage = True - self._with_photos = True self._with_surveillance_station = True self._with_system = True self._with_upgrade = True @@ -157,6 +160,26 @@ class SynoApi: self.dsm.reset(SynoCoreUpgrade.API_KEY) LOGGER.debug("Disabled fetching upgrade data during setup: %s", ex) + # check if file station is used and permitted + self._with_file_station = bool(self.dsm.apis.get(SynoFileStation.LIST_API_KEY)) + if self._with_file_station: + shares: list | None = None + with suppress(*SYNOLOGY_CONNECTION_EXCEPTIONS): + shares = await self.dsm.file.get_shared_folders(only_writable=True) + if not shares: + self._with_file_station = False + self.dsm.reset(SynoFileStation.API_KEY) + LOGGER.debug( + "File Station found, but disabled due to missing user" + " permissions or no writable shared folders available" + ) + + LOGGER.debug( + "State of File Station during setup of '%s': %s", + self._entry.unique_id, + self._with_file_station, + ) + await self._fetch_device_configuration() try: @@ -225,6 +248,15 @@ class SynoApi: self.dsm.reset(self.security) self.security = None + if not self._with_file_station: + LOGGER.debug( + "Disable file station api from being updated or '%s'", + self._entry.unique_id, + ) + if self.file_station: + self.dsm.reset(self.file_station) + self.file_station = None + if not self._with_photos: LOGGER.debug( "Disable photos api from being updated or '%s'", self._entry.unique_id @@ -272,6 +304,12 @@ class SynoApi: self.network = self.dsm.network await self.network.update() + if self._with_file_station: + LOGGER.debug( + "Enable file station api updates for '%s'", self._entry.unique_id + ) + self.file_station = self.dsm.file + if self._with_security: LOGGER.debug("Enable security api updates for '%s'", self._entry.unique_id) self.security = self.dsm.security diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index 03e2eaf8e7b..30f5078f19d 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -3,12 +3,14 @@ from __future__ import annotations from collections.abc import Mapping +from contextlib import suppress from ipaddress import ip_address as ip import logging -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from urllib.parse import urlparse from synology_dsm import SynologyDSM +from synology_dsm.api.file_station.models import SynoFileSharedFolder from synology_dsm.exceptions import ( SynologyDSMException, SynologyDSMLogin2SAFailedException, @@ -40,6 +42,12 @@ from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession 
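Editor's note — the SynoApi change above only enables File Station support when the NAS advertises the API and the configured user can see at least one writable shared folder; otherwise the API is reset so the backup agent never appears. Condensed into a standalone helper (hypothetical name; same library calls as in the diff, minus the connection-error suppression):

```python
from synology_dsm import SynologyDSM
from synology_dsm.api.file_station import SynoFileStation


async def file_station_usable(dsm: SynologyDSM) -> bool:
    """Mirror the gating added to SynoApi: API present and a writable share.

    The real code additionally suppresses SYNOLOGY_CONNECTION_EXCEPTIONS
    around the shared-folder lookup.
    """
    if not dsm.apis.get(SynoFileStation.LIST_API_KEY):
        return False
    shares = await dsm.file.get_shared_folders(only_writable=True)
    return bool(shares)
```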
import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) from homeassistant.helpers.service_info.ssdp import ( ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_SERIAL, @@ -47,12 +55,16 @@ from homeassistant.helpers.service_info.ssdp import ( ) from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from homeassistant.helpers.typing import DiscoveryInfoType, VolDictType +from homeassistant.util import slugify from homeassistant.util.network import is_ip_address as is_ip from .const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, CONF_DEVICE_TOKEN, CONF_SNAPSHOT_QUALITY, CONF_VOLUMES, + DEFAULT_BACKUP_PATH, DEFAULT_PORT, DEFAULT_PORT_SSL, DEFAULT_SCAN_INTERVAL, @@ -61,7 +73,9 @@ from .const import ( DEFAULT_USE_SSL, DEFAULT_VERIFY_SSL, DOMAIN, + SYNOLOGY_CONNECTION_EXCEPTIONS, ) +from .models import SynologyDSMData _LOGGER = logging.getLogger(__name__) @@ -131,6 +145,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): self.discovered_conf: dict[str, Any] = {} self.reauth_conf: Mapping[str, Any] = {} self.reauth_reason: str | None = None + self.shares: list[SynoFileSharedFolder] | None = None def _show_form( self, @@ -173,6 +188,8 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): verify_ssl = user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL) otp_code = user_input.get(CONF_OTP_CODE) friendly_name = user_input.get(CONF_NAME) + backup_path = user_input.get(CONF_BACKUP_PATH) + backup_share = user_input.get(CONF_BACKUP_SHARE) if not port: if use_ssl is True: @@ -209,6 +226,12 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): if errors: return self._show_form(step_id, user_input, errors) + with suppress(*SYNOLOGY_CONNECTION_EXCEPTIONS): + self.shares = await api.file.get_shared_folders(only_writable=True) + + if self.shares and not backup_path: + return await self.async_step_backup_share(user_input) + # unique_id should be serial for services purpose existing_entry = await self.async_set_unique_id(serial, raise_on_progress=False) @@ -221,6 +244,10 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: password, CONF_MAC: api.network.macs, } + config_options = { + CONF_BACKUP_PATH: backup_path, + CONF_BACKUP_SHARE: backup_share, + } if otp_code: config_data[CONF_DEVICE_TOKEN] = api.device_token if user_input.get(CONF_DISKS): @@ -233,10 +260,12 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): "reauth_successful" if self.reauth_conf else "reconfigure_successful" ) return self.async_update_reload_and_abort( - existing_entry, data=config_data, reason=reason + existing_entry, data=config_data, options=config_options, reason=reason ) - return self.async_create_entry(title=friendly_name or host, data=config_data) + return self.async_create_entry( + title=friendly_name or host, data=config_data, options=config_options + ) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -368,6 +397,43 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user(user_input) + async def async_step_backup_share( + self, user_input: dict[str, Any], errors: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Select backup location.""" + if TYPE_CHECKING: + assert self.shares is not None + + if not self.saved_user_input: + self.saved_user_input = user_input + + if CONF_BACKUP_PATH not in user_input and CONF_BACKUP_SHARE not in user_input: + return 
self.async_show_form( + step_id="backup_share", + data_schema=vol.Schema( + { + vol.Required(CONF_BACKUP_SHARE): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict(value=s.path, label=s.name) + for s in self.shares + ], + mode=SelectSelectorMode.DROPDOWN, + ), + ), + vol.Required( + CONF_BACKUP_PATH, + default=f"{DEFAULT_BACKUP_PATH}_{slugify(self.hass.config.location_name)}", + ): str, + } + ), + ) + + user_input = {**self.saved_user_input, **user_input} + self.saved_user_input = {} + + return await self.async_step_user(user_input) + def _async_get_existing_entry(self, discovered_mac: str) -> ConfigEntry | None: """See if we already have a configured NAS with this MAC address.""" for entry in self._async_current_entries(): @@ -388,6 +454,8 @@ class SynologyDSMOptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(title="", data=user_input) + syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.config_entry.unique_id] + data_schema = vol.Schema( { vol.Required( @@ -404,6 +472,36 @@ class SynologyDSMOptionsFlowHandler(OptionsFlow): ): vol.All(vol.Coerce(int), vol.Range(min=0, max=2)), } ) + + shares: list[SynoFileSharedFolder] | None = None + if syno_data.api.file_station: + with suppress(*SYNOLOGY_CONNECTION_EXCEPTIONS): + shares = await syno_data.api.file_station.get_shared_folders( + only_writable=True + ) + + if shares: + data_schema = data_schema.extend( + { + vol.Required( + CONF_BACKUP_SHARE, + default=self.config_entry.options[CONF_BACKUP_SHARE], + ): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict(value=s.path, label=s.name) + for s in shares + ], + mode=SelectSelectorMode.DROPDOWN, + ), + ), + vol.Required( + CONF_BACKUP_PATH, + default=self.config_entry.options[CONF_BACKUP_PATH], + ): str, + } + ) + return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/synology_dsm/const.py b/homeassistant/components/synology_dsm/const.py index e6367458578..dbee85b99d6 100644 --- a/homeassistant/components/synology_dsm/const.py +++ b/homeassistant/components/synology_dsm/const.py @@ -2,6 +2,8 @@ from __future__ import annotations +from collections.abc import Callable + from aiohttp import ClientTimeout from synology_dsm.api.surveillance_station.const import SNAPSHOT_PROFILE_BALANCED from synology_dsm.exceptions import ( @@ -15,8 +17,12 @@ from synology_dsm.exceptions import ( ) from homeassistant.const import Platform +from homeassistant.util.hass_dict import HassKey DOMAIN = "synology_dsm" +DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( + f"{DOMAIN}_backup_agent_listeners" +) ATTRIBUTION = "Data provided by Synology" PLATFORMS = [ Platform.BINARY_SENSOR, @@ -34,6 +40,8 @@ CONF_SERIAL = "serial" CONF_VOLUMES = "volumes" CONF_DEVICE_TOKEN = "device_token" CONF_SNAPSHOT_QUALITY = "snap_profile_type" +CONF_BACKUP_SHARE = "backup_share" +CONF_BACKUP_PATH = "backup_path" DEFAULT_USE_SSL = True DEFAULT_VERIFY_SSL = False @@ -43,6 +51,7 @@ DEFAULT_PORT_SSL = 5001 DEFAULT_SCAN_INTERVAL = 15 # min DEFAULT_TIMEOUT = ClientTimeout(total=60, connect=15) DEFAULT_SNAPSHOT_QUALITY = SNAPSHOT_PROFILE_BALANCED +DEFAULT_BACKUP_PATH = "ha_backup" ENTITY_UNIT_LOAD = "load" diff --git a/homeassistant/components/synology_dsm/strings.json b/homeassistant/components/synology_dsm/strings.json index 0f8ea594732..3d64f908256 100644 --- a/homeassistant/components/synology_dsm/strings.json +++ b/homeassistant/components/synology_dsm/strings.json @@ -21,6 +21,17 @@ 
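Editor's note — the backup_share step above pre-fills the path field by appending a slug of the Home Assistant location name to `DEFAULT_BACKUP_PATH` from const.py. A quick worked example of the resulting default (hypothetical wrapper around the same expression):

```python
from homeassistant.util import slugify

DEFAULT_BACKUP_PATH = "ha_backup"  # as defined in const.py above


def default_backup_path(location_name: str) -> str:
    """Default suggested in the backup_share step, e.g. 'ha_backup_home'."""
    return f"{DEFAULT_BACKUP_PATH}_{slugify(location_name)}"


# default_backup_path("Home")        -> "ha_backup_home"
# default_backup_path("Beach House") -> "ha_backup_beach_house"
```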
"otp_code": "Code" } }, + "backup_share": { + "title": "Synology DSM: Backup location", + "data": { + "backup_share": "Shared folder", + "backup_path": "Path" + }, + "data_description": { + "backup_share": "Select the shared folder, where the automatic Home-Assistant backup should be stored.", + "backup_path": "Define the path on the selected shared folder (will automatically be created, if not exist)." + } + }, "link": { "description": "Do you want to set up {name} ({host})?", "data": { diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index cc5dee77617..3e42e33489f 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -3,14 +3,15 @@ from datetime import timedelta import logging -import requests.exceptions +import PyTado +import PyTado.exceptions +from PyTado.interface import Tado from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType from .const import ( @@ -21,11 +22,9 @@ from .const import ( CONST_OVERLAY_TADO_OPTIONS, DOMAIN, ) +from .coordinator import TadoDataUpdateCoordinator, TadoMobileDeviceUpdateCoordinator +from .models import TadoData from .services import setup_services -from .tado_connector import TadoConnector - -_LOGGER = logging.getLogger(__name__) - PLATFORMS = [ Platform.BINARY_SENSOR, @@ -41,16 +40,17 @@ SCAN_MOBILE_DEVICE_INTERVAL = timedelta(seconds=30) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Tado.""" setup_services(hass) - return True -type TadoConfigEntry = ConfigEntry[TadoConnector] +type TadoConfigEntry = ConfigEntry[TadoData] async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: @@ -58,53 +58,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool _async_import_options_from_data_if_missing(hass, entry) - username = entry.data[CONF_USERNAME] - password = entry.data[CONF_PASSWORD] - fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT) - - tadoconnector = TadoConnector(hass, username, password, fallback) - + _LOGGER.debug("Setting up Tado connection") try: - await hass.async_add_executor_job(tadoconnector.setup) - except KeyError: - _LOGGER.error("Failed to login to tado") - return False - except RuntimeError as exc: - _LOGGER.error("Failed to setup tado: %s", exc) - return False - except requests.exceptions.Timeout as ex: - raise ConfigEntryNotReady from ex - except requests.exceptions.HTTPError as ex: - if ex.response.status_code > 400 and ex.response.status_code < 500: - _LOGGER.error("Failed to login to tado: %s", ex) - return False - raise ConfigEntryNotReady from ex - - # Do first update - await hass.async_add_executor_job(tadoconnector.update) - - # Poll for updates in the background - entry.async_on_unload( - async_track_time_interval( - hass, - lambda now: tadoconnector.update(), - SCAN_INTERVAL, + tado = await hass.async_add_executor_job( + Tado, + entry.data[CONF_USERNAME], + entry.data[CONF_PASSWORD], ) + except 
PyTado.exceptions.TadoWrongCredentialsException as err: + raise ConfigEntryError(f"Invalid Tado credentials. Error: {err}") from err + except PyTado.exceptions.TadoException as err: + raise ConfigEntryNotReady(f"Error during Tado setup: {err}") from err + _LOGGER.debug( + "Tado connection established for username: %s", entry.data[CONF_USERNAME] ) - entry.async_on_unload( - async_track_time_interval( - hass, - lambda now: tadoconnector.update_mobile_devices(), - SCAN_MOBILE_DEVICE_INTERVAL, - ) - ) + coordinator = TadoDataUpdateCoordinator(hass, entry, tado) + await coordinator.async_config_entry_first_refresh() - entry.async_on_unload(entry.add_update_listener(_async_update_listener)) - - entry.runtime_data = tadoconnector + mobile_coordinator = TadoMobileDeviceUpdateCoordinator(hass, entry, tado) + await mobile_coordinator.async_config_entry_first_refresh() + entry.runtime_data = TadoData(coordinator, mobile_coordinator) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True @@ -126,7 +103,7 @@ def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: Confi hass.config_entries.async_update_entry(entry, options=options) -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: TadoConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/tado/binary_sensor.py b/homeassistant/components/tado/binary_sensor.py index 25c1c801155..c969ea34f42 100644 --- a/homeassistant/components/tado/binary_sensor.py +++ b/homeassistant/components/tado/binary_sensor.py @@ -13,21 +13,19 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntityDescription, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import TadoConfigEntry from .const import ( - SIGNAL_TADO_UPDATE_RECEIVED, TYPE_AIR_CONDITIONING, TYPE_BATTERY, TYPE_HEATING, TYPE_HOT_WATER, TYPE_POWER, ) +from .coordinator import TadoDataUpdateCoordinator from .entity import TadoDeviceEntity, TadoZoneEntity -from .tado_connector import TadoConnector _LOGGER = logging.getLogger(__name__) @@ -121,7 +119,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado sensor platform.""" - tado = entry.runtime_data + tado = entry.runtime_data.coordinator devices = tado.devices zones = tado.zones entities: list[BinarySensorEntity] = [] @@ -164,43 +162,23 @@ class TadoDeviceBinarySensor(TadoDeviceEntity, BinarySensorEntity): def __init__( self, - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, device_info: dict[str, Any], entity_description: TadoBinarySensorEntityDescription, ) -> None: """Initialize of the Tado Sensor.""" self.entity_description = entity_description - self._tado = tado - super().__init__(device_info) + super().__init__(device_info, coordinator) self._attr_unique_id = ( - f"{entity_description.key} {self.device_id} {tado.home_id}" + f"{entity_description.key} {self.device_id} {coordinator.home_id}" ) - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self._tado.home_id, "device", self.device_id - ), - self._async_update_callback, - ) - ) - self._async_update_device_data() - @callback - def _async_update_callback(self) -> None: - """Update and write state.""" - self._async_update_device_data() - self.async_write_ha_state() - - @callback - def _async_update_device_data(self) -> None: - """Handle update callbacks.""" + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" try: - self._device_info = self._tado.data["device"][self.device_id] + self._device_info = self.coordinator.data["device"][self.device_id] except KeyError: return @@ -209,6 +187,7 @@ class TadoDeviceBinarySensor(TadoDeviceEntity, BinarySensorEntity): self._attr_extra_state_attributes = self.entity_description.attributes_fn( self._device_info ) + super()._handle_coordinator_update() class TadoZoneBinarySensor(TadoZoneEntity, BinarySensorEntity): @@ -218,42 +197,24 @@ class TadoZoneBinarySensor(TadoZoneEntity, BinarySensorEntity): def __init__( self, - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, zone_name: str, zone_id: int, entity_description: TadoBinarySensorEntityDescription, ) -> None: """Initialize of the Tado Sensor.""" self.entity_description = entity_description - self._tado = tado - super().__init__(zone_name, tado.home_id, zone_id) + super().__init__(zone_name, coordinator.home_id, zone_id, coordinator) - self._attr_unique_id = f"{entity_description.key} {zone_id} {tado.home_id}" - - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self._tado.home_id, "zone", self.zone_id - ), - self._async_update_callback, - ) + self._attr_unique_id = ( + f"{entity_description.key} {zone_id} {coordinator.home_id}" ) - self._async_update_zone_data() @callback - def _async_update_callback(self) -> None: - """Update and write state.""" - self._async_update_zone_data() - self.async_write_ha_state() - - @callback - def _async_update_zone_data(self) -> None: - """Handle update callbacks.""" + def _handle_coordinator_update(self) 
-> None: + """Handle updated data from the coordinator.""" try: - tado_zone_data = self._tado.data["zone"][self.zone_id] + tado_zone_data = self.coordinator.data["zone"][self.zone_id] except KeyError: return @@ -262,3 +223,4 @@ class TadoZoneBinarySensor(TadoZoneEntity, BinarySensorEntity): self._attr_extra_state_attributes = self.entity_description.attributes_fn( tado_zone_data ) + super()._handle_coordinator_update() diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 5a81e951293..c8eaec76255 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -26,11 +26,10 @@ from homeassistant.components.climate import ( from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_platform -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType -from . import TadoConfigEntry, TadoConnector +from . import TadoConfigEntry from .const import ( CONST_EXCLUSIVE_OVERLAY_GROUP, CONST_FAN_AUTO, @@ -50,7 +49,6 @@ from .const import ( HA_TO_TADO_HVAC_MODE_MAP, ORDERED_KNOWN_TADO_MODES, PRESET_AUTO, - SIGNAL_TADO_UPDATE_RECEIVED, SUPPORT_PRESET_AUTO, SUPPORT_PRESET_MANUAL, TADO_DEFAULT_MAX_TEMP, @@ -73,6 +71,7 @@ from .const import ( TYPE_AIR_CONDITIONING, TYPE_HEATING, ) +from .coordinator import TadoDataUpdateCoordinator from .entity import TadoZoneEntity from .helper import decide_duration, decide_overlay_mode, generate_supported_fanmodes @@ -105,8 +104,8 @@ async def async_setup_entry( ) -> None: """Set up the Tado climate platform.""" - tado = entry.runtime_data - entities = await hass.async_add_executor_job(_generate_entities, tado) + tado = entry.runtime_data.coordinator + entities = await _generate_entities(tado) platform = entity_platform.async_get_current_platform() @@ -125,12 +124,12 @@ async def async_setup_entry( async_add_entities(entities, True) -def _generate_entities(tado: TadoConnector) -> list[TadoClimate]: +async def _generate_entities(tado: TadoDataUpdateCoordinator) -> list[TadoClimate]: """Create all climate entities.""" entities = [] for zone in tado.zones: if zone["type"] in [TYPE_HEATING, TYPE_AIR_CONDITIONING]: - entity = create_climate_entity( + entity = await create_climate_entity( tado, zone["name"], zone["id"], zone["devices"][0] ) if entity: @@ -138,11 +137,11 @@ def _generate_entities(tado: TadoConnector) -> list[TadoClimate]: return entities -def create_climate_entity( - tado: TadoConnector, name: str, zone_id: int, device_info: dict +async def create_climate_entity( + tado: TadoDataUpdateCoordinator, name: str, zone_id: int, device_info: dict ) -> TadoClimate | None: """Create a Tado climate entity.""" - capabilities = tado.get_capabilities(zone_id) + capabilities = await tado.get_capabilities(zone_id) _LOGGER.debug("Capabilities for zone %s: %s", zone_id, capabilities) zone_type = capabilities["type"] @@ -243,6 +242,8 @@ def create_climate_entity( cool_max_temp = float(cool_temperatures["celsius"]["max"]) cool_step = cool_temperatures["celsius"].get("step", PRECISION_TENTHS) + auto_geofencing_supported = await tado.get_auto_geofencing_supported() + return TadoClimate( tado, name, @@ -251,6 +252,8 @@ def create_climate_entity( supported_hvac_modes, support_flags, device_info, + capabilities, + 
auto_geofencing_supported, heat_min_temp, heat_max_temp, heat_step, @@ -272,13 +275,15 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): def __init__( self, - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, zone_name: str, zone_id: int, zone_type: str, supported_hvac_modes: list[HVACMode], support_flags: ClimateEntityFeature, device_info: dict[str, str], + capabilities: dict[str, str], + auto_geofencing_supported: bool, heat_min_temp: float | None = None, heat_max_temp: float | None = None, heat_step: float | None = None, @@ -289,13 +294,13 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): supported_swing_modes: list[str] | None = None, ) -> None: """Initialize of Tado climate entity.""" - self._tado = tado - super().__init__(zone_name, tado.home_id, zone_id) + self._tado = coordinator + super().__init__(zone_name, coordinator.home_id, zone_id, coordinator) self.zone_id = zone_id self.zone_type = zone_type - self._attr_unique_id = f"{zone_type} {zone_id} {tado.home_id}" + self._attr_unique_id = f"{zone_type} {zone_id} {coordinator.home_id}" self._device_info = device_info self._device_id = self._device_info["shortSerialNo"] @@ -327,36 +332,61 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._current_tado_vertical_swing = TADO_SWING_OFF self._current_tado_horizontal_swing = TADO_SWING_OFF - capabilities = tado.get_capabilities(zone_id) self._current_tado_capabilities = capabilities + self._auto_geofencing_supported = auto_geofencing_supported self._tado_zone_data: PyTado.TadoZone = {} self._tado_geofence_data: dict[str, str] | None = None self._tado_zone_temp_offset: dict[str, Any] = {} - self._async_update_home_data() self._async_update_zone_data() - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format(self._tado.home_id, "home", "data"), - self._async_update_home_callback, - ) - ) + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._async_update_zone_data() + super()._handle_coordinator_update() - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self._tado.home_id, "zone", self.zone_id - ), - self._async_update_zone_callback, + @callback + def _async_update_zone_data(self) -> None: + """Load tado data into zone.""" + self._tado_geofence_data = self._tado.data["geofence"] + self._tado_zone_data = self._tado.data["zone"][self.zone_id] + + # Assign offset values to mapped attributes + for offset_key, attr in TADO_TO_HA_OFFSET_MAP.items(): + if ( + self._device_id in self._tado.data["device"] + and offset_key + in self._tado.data["device"][self._device_id][TEMP_OFFSET] + ): + self._tado_zone_temp_offset[attr] = self._tado.data["device"][ + self._device_id + ][TEMP_OFFSET][offset_key] + + self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode + self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action + + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = self._tado_zone_data.current_fan_level + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + 
self._current_tado_vertical_swing = ( + self._tado_zone_data.current_vertical_swing_mode ) - ) + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + self._current_tado_horizontal_swing = ( + self._tado_zone_data.current_horizontal_swing_mode + ) + + @callback + def _async_update_zone_callback(self) -> None: + """Load tado data and update state.""" + self._async_update_zone_data() @property def current_humidity(self) -> int | None: @@ -401,12 +431,13 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): return FAN_AUTO return None - def set_fan_mode(self, fan_mode: str) -> None: + async def async_set_fan_mode(self, fan_mode: str) -> None: """Turn fan on/off.""" if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): - self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP_LEGACY[fan_mode]) + await self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP_LEGACY[fan_mode]) elif self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): - self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) + await self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) + await self.coordinator.async_request_refresh() @property def preset_mode(self) -> str: @@ -425,13 +456,14 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): @property def preset_modes(self) -> list[str]: """Return a list of available preset modes.""" - if self._tado.get_auto_geofencing_supported(): + if self._auto_geofencing_supported: return SUPPORT_PRESET_AUTO return SUPPORT_PRESET_MANUAL - def set_preset_mode(self, preset_mode: str) -> None: + async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - self._tado.set_presence(preset_mode) + await self._tado.set_presence(preset_mode) + await self.coordinator.async_request_refresh() @property def target_temperature_step(self) -> float | None: @@ -449,7 +481,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): # the device is switching states return self._tado_zone_data.target_temp or self._tado_zone_data.current_temp - def set_timer( + async def set_timer( self, temperature: float, time_period: int | None = None, @@ -457,14 +489,15 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): ): """Set the timer on the entity, and temperature if supported.""" - self._control_hvac( + await self._control_hvac( hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period, overlay_mode=requested_overlay, ) + await self.coordinator.async_request_refresh() - def set_temp_offset(self, offset: float) -> None: + async def set_temp_offset(self, offset: float) -> None: """Set offset on the entity.""" _LOGGER.debug( @@ -474,8 +507,9 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): ) self._tado.set_temperature_offset(self._device_id, offset) + await self.coordinator.async_request_refresh() - def set_temperature(self, **kwargs: Any) -> None: + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: return @@ -485,15 +519,21 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): CONST_MODE_AUTO, CONST_MODE_SMART_SCHEDULE, ): - self._control_hvac(target_temp=temperature) + await self._control_hvac(target_temp=temperature) + await self.coordinator.async_request_refresh() return new_hvac_mode = CONST_MODE_COOL if self._ac_device else CONST_MODE_HEAT - self._control_hvac(target_temp=temperature, hvac_mode=new_hvac_mode) + await self._control_hvac(target_temp=temperature, hvac_mode=new_hvac_mode) + await 
self.coordinator.async_request_refresh() - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" - self._control_hvac(hvac_mode=HA_TO_TADO_HVAC_MODE_MAP[hvac_mode]) + _LOGGER.debug( + "Setting new hvac mode for device %s to %s", self._device_id, hvac_mode + ) + await self._control_hvac(hvac_mode=HA_TO_TADO_HVAC_MODE_MAP[hvac_mode]) + await self.coordinator.async_request_refresh() @property def available(self) -> bool: @@ -559,7 +599,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): ) return state_attr - def set_swing_mode(self, swing_mode: str) -> None: + async def async_set_swing_mode(self, swing_mode: str) -> None: """Set swing modes for the device.""" vertical_swing = None horizontal_swing = None @@ -591,62 +631,12 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - self._control_hvac( + await self._control_hvac( swing_mode=swing, vertical_swing=vertical_swing, horizontal_swing=horizontal_swing, ) - - @callback - def _async_update_zone_data(self) -> None: - """Load tado data into zone.""" - self._tado_zone_data = self._tado.data["zone"][self.zone_id] - - # Assign offset values to mapped attributes - for offset_key, attr in TADO_TO_HA_OFFSET_MAP.items(): - if ( - self._device_id in self._tado.data["device"] - and offset_key - in self._tado.data["device"][self._device_id][TEMP_OFFSET] - ): - self._tado_zone_temp_offset[attr] = self._tado.data["device"][ - self._device_id - ][TEMP_OFFSET][offset_key] - - self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode - self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action - - if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): - self._current_tado_fan_level = self._tado_zone_data.current_fan_level - if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): - self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed - if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): - self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode - if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): - self._current_tado_vertical_swing = ( - self._tado_zone_data.current_vertical_swing_mode - ) - if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): - self._current_tado_horizontal_swing = ( - self._tado_zone_data.current_horizontal_swing_mode - ) - - @callback - def _async_update_zone_callback(self) -> None: - """Load tado data and update state.""" - self._async_update_zone_data() - self.async_write_ha_state() - - @callback - def _async_update_home_data(self) -> None: - """Load tado geofencing data into zone.""" - self._tado_geofence_data = self._tado.data["geofence"] - - @callback - def _async_update_home_callback(self) -> None: - """Load tado data and update state.""" - self._async_update_home_data() - self.async_write_ha_state() + await self.coordinator.async_request_refresh() def _normalize_target_temp_for_hvac_mode(self) -> None: def adjust_temp(min_temp, max_temp) -> float | None: @@ -665,7 +655,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): elif self._current_tado_hvac_mode == CONST_MODE_HEAT: self._target_temp = adjust_temp(self._heat_min_temp, self._heat_max_temp) - def _control_hvac( + async def _control_hvac( self, hvac_mode: str | None = None, target_temp: float | None = None, @@ -712,7 +702,9 @@ class 
TadoClimate(TadoZoneEntity, ClimateEntity): _LOGGER.debug( "Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id ) - self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, self.zone_type) + await self._tado.set_zone_off( + self.zone_id, CONST_OVERLAY_MANUAL, self.zone_type + ) return if self._current_tado_hvac_mode == CONST_MODE_SMART_SCHEDULE: @@ -721,17 +713,17 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self.zone_name, self.zone_id, ) - self._tado.reset_zone_overlay(self.zone_id) + await self._tado.reset_zone_overlay(self.zone_id) return overlay_mode = decide_overlay_mode( - tado=self._tado, + coordinator=self._tado, duration=duration, overlay_mode=overlay_mode, zone_id=self.zone_id, ) duration = decide_duration( - tado=self._tado, + coordinator=self._tado, duration=duration, zone_id=self.zone_id, overlay_mode=overlay_mode, @@ -785,7 +777,7 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): ): swing = self._current_tado_swing_mode - self._tado.set_zone_overlay( + await self._tado.set_zone_overlay( zone_id=self.zone_id, overlay_mode=overlay_mode, # What to do when the period ends temperature=temperature_to_send, @@ -800,18 +792,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): ) def _is_valid_setting_for_hvac_mode(self, setting: str) -> bool: - return ( - self._current_tado_capabilities.get(self._current_tado_hvac_mode, {}).get( - setting - ) - is not None + """Determine if a setting is valid for the current HVAC mode.""" + capabilities: str | dict[str, str] = self._current_tado_capabilities.get( + self._current_tado_hvac_mode, {} ) + if isinstance(capabilities, dict): + return capabilities.get(setting) is not None + return False def _is_current_setting_supported_by_current_hvac_mode( self, setting: str, current_state: str | None ) -> bool: - if self._is_valid_setting_for_hvac_mode(setting): - return current_state in self._current_tado_capabilities[ - self._current_tado_hvac_mode - ].get(setting, []) + """Determine if the current setting is supported by the current HVAC mode.""" + capabilities: str | dict[str, str] = self._current_tado_capabilities.get( + self._current_tado_hvac_mode, {} + ) + if isinstance(capabilities, dict) and self._is_valid_setting_for_hvac_mode( + setting + ): + return current_state in capabilities.get(setting, []) return False diff --git a/homeassistant/components/tado/coordinator.py b/homeassistant/components/tado/coordinator.py new file mode 100644 index 00000000000..ddec9e7f292 --- /dev/null +++ b/homeassistant/components/tado/coordinator.py @@ -0,0 +1,391 @@ +"""Coordinator for the Tado integration.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +import logging +from typing import Any + +from PyTado.interface import Tado +from requests import RequestException + +from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + CONF_FALLBACK, + CONST_OVERLAY_TADO_DEFAULT, + DOMAIN, + INSIDE_TEMPERATURE_MEASUREMENT, + PRESET_AUTO, + TEMP_OFFSET, +) + +_LOGGER = logging.getLogger(__name__) + +MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=4) +SCAN_INTERVAL = timedelta(minutes=5) +SCAN_MOBILE_DEVICE_INTERVAL = timedelta(seconds=30) + +type TadoConfigEntry = 
ConfigEntry[TadoDataUpdateCoordinator] + + +class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): + """Class to manage API calls from and to Tado via PyTado.""" + + tado: Tado + home_id: int + home_name: str + config_entry: TadoConfigEntry + + def __init__( + self, + hass: HomeAssistant, + entry: ConfigEntry, + tado: Tado, + debug: bool = False, + ) -> None: + """Initialize the Tado data update coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self._tado = tado + self._username = entry.data[CONF_USERNAME] + self._password = entry.data[CONF_PASSWORD] + self._fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT) + self._debug = debug + + self.home_id: int + self.home_name: str + self.zones: list[dict[Any, Any]] = [] + self.devices: list[dict[Any, Any]] = [] + self.data: dict[str, dict] = { + "device": {}, + "weather": {}, + "geofence": {}, + "zone": {}, + } + + @property + def fallback(self) -> str: + """Return fallback flag to Smart Schedule.""" + return self._fallback + + async def _async_update_data(self) -> dict[str, dict]: + """Fetch the (initial) latest data from Tado.""" + + try: + _LOGGER.debug("Preloading home data") + tado_home_call = await self.hass.async_add_executor_job(self._tado.get_me) + _LOGGER.debug("Preloading zones and devices") + self.zones = await self.hass.async_add_executor_job(self._tado.get_zones) + self.devices = await self.hass.async_add_executor_job( + self._tado.get_devices + ) + except RequestException as err: + raise UpdateFailed(f"Error during Tado setup: {err}") from err + + tado_home = tado_home_call["homes"][0] + self.home_id = tado_home["id"] + self.home_name = tado_home["name"] + + devices = await self._async_update_devices() + zones = await self._async_update_zones() + home = await self._async_update_home() + + self.data["device"] = devices + self.data["zone"] = zones + self.data["weather"] = home["weather"] + self.data["geofence"] = home["geofence"] + + return self.data + + async def _async_update_devices(self) -> dict[str, dict]: + """Update the device data from Tado.""" + + try: + devices = await self.hass.async_add_executor_job(self._tado.get_devices) + except RequestException as err: + _LOGGER.error("Error updating Tado devices: %s", err) + raise UpdateFailed(f"Error updating Tado devices: {err}") from err + + if not devices: + _LOGGER.error("No linked devices found for home ID %s", self.home_id) + raise UpdateFailed(f"No linked devices found for home ID {self.home_id}") + + return await self.hass.async_add_executor_job(self._update_device_info, devices) + + def _update_device_info(self, devices: list[dict[str, Any]]) -> dict[str, dict]: + """Update the device data from Tado.""" + mapped_devices: dict[str, dict] = {} + for device in devices: + device_short_serial_no = device["shortSerialNo"] + _LOGGER.debug("Updating device %s", device_short_serial_no) + try: + if ( + INSIDE_TEMPERATURE_MEASUREMENT + in device["characteristics"]["capabilities"] + ): + _LOGGER.debug( + "Updating temperature offset for device %s", + device_short_serial_no, + ) + device[TEMP_OFFSET] = self._tado.get_device_info( + device_short_serial_no, TEMP_OFFSET + ) + except RequestException as err: + _LOGGER.error( + "Error updating device %s: %s", device_short_serial_no, err + ) + + _LOGGER.debug( + "Device %s updated, with data: %s", device_short_serial_no, device + ) + mapped_devices[device_short_serial_no] = device + + return mapped_devices + + async def _async_update_zones(self) 
-> dict[int, dict]: + """Update the zone data from Tado.""" + + try: + zone_states_call = await self.hass.async_add_executor_job( + self._tado.get_zone_states + ) + zone_states = zone_states_call["zoneStates"] + except RequestException as err: + _LOGGER.error("Error updating Tado zones: %s", err) + raise UpdateFailed(f"Error updating Tado zones: {err}") from err + + mapped_zones: dict[int, dict] = {} + for zone in zone_states: + mapped_zones[int(zone)] = await self._update_zone(int(zone)) + + return mapped_zones + + async def _update_zone(self, zone_id: int) -> dict[str, str]: + """Update the internal data of a zone.""" + + _LOGGER.debug("Updating zone %s", zone_id) + try: + data = await self.hass.async_add_executor_job( + self._tado.get_zone_state, zone_id + ) + except RequestException as err: + _LOGGER.error("Error updating Tado zone %s: %s", zone_id, err) + raise UpdateFailed(f"Error updating Tado zone {zone_id}: {err}") from err + + _LOGGER.debug("Zone %s updated, with data: %s", zone_id, data) + return data + + async def _async_update_home(self) -> dict[str, dict]: + """Update the home data from Tado.""" + + try: + weather = await self.hass.async_add_executor_job(self._tado.get_weather) + geofence = await self.hass.async_add_executor_job(self._tado.get_home_state) + except RequestException as err: + _LOGGER.error("Error updating Tado home: %s", err) + raise UpdateFailed(f"Error updating Tado home: {err}") from err + + _LOGGER.debug( + "Home data updated, with weather and geofence data: %s, %s", + weather, + geofence, + ) + + return {"weather": weather, "geofence": geofence} + + async def get_capabilities(self, zone_id: int | str) -> dict: + """Fetch the capabilities from Tado.""" + + try: + return await self.hass.async_add_executor_job( + self._tado.get_capabilities, zone_id + ) + except RequestException as err: + raise UpdateFailed(f"Error updating Tado data: {err}") from err + + async def get_auto_geofencing_supported(self) -> bool: + """Fetch the auto geofencing supported from Tado.""" + + try: + return await self.hass.async_add_executor_job( + self._tado.get_auto_geofencing_supported + ) + except RequestException as err: + raise UpdateFailed(f"Error updating Tado data: {err}") from err + + async def reset_zone_overlay(self, zone_id): + """Reset the zone back to the default operation.""" + + try: + await self.hass.async_add_executor_job( + self._tado.reset_zone_overlay, zone_id + ) + await self._update_zone(zone_id) + except RequestException as err: + raise UpdateFailed(f"Error resetting Tado data: {err}") from err + + async def set_presence( + self, + presence=PRESET_HOME, + ): + """Set the presence to home, away or auto.""" + + if presence == PRESET_AWAY: + await self.hass.async_add_executor_job(self._tado.set_away) + elif presence == PRESET_HOME: + await self.hass.async_add_executor_job(self._tado.set_home) + elif presence == PRESET_AUTO: + await self.hass.async_add_executor_job(self._tado.set_auto) + + async def set_zone_overlay( + self, + zone_id=None, + overlay_mode=None, + temperature=None, + duration=None, + device_type="HEATING", + mode=None, + fan_speed=None, + swing=None, + fan_level=None, + vertical_swing=None, + horizontal_swing=None, + ) -> None: + """Set a zone overlay.""" + + _LOGGER.debug( + "Set overlay for zone %s: overlay_mode=%s, temp=%s, duration=%s, type=%s, mode=%s, fan_speed=%s, swing=%s, fan_level=%s, vertical_swing=%s, horizontal_swing=%s", + zone_id, + overlay_mode, + temperature, + duration, + device_type, + mode, + fan_speed, + swing, + fan_level, + 
vertical_swing, + horizontal_swing, + ) + + try: + await self.hass.async_add_executor_job( + self._tado.set_zone_overlay, + zone_id, + overlay_mode, + temperature, + duration, + device_type, + "ON", + mode, + fan_speed, + swing, + fan_level, + vertical_swing, + horizontal_swing, + ) + + except RequestException as err: + raise UpdateFailed(f"Error setting Tado overlay: {err}") from err + + await self._update_zone(zone_id) + + async def set_zone_off(self, zone_id, overlay_mode, device_type="HEATING"): + """Set a zone to off.""" + try: + await self.hass.async_add_executor_job( + self._tado.set_zone_overlay, + zone_id, + overlay_mode, + None, + None, + device_type, + "OFF", + ) + except RequestException as err: + raise UpdateFailed(f"Error setting Tado overlay: {err}") from err + + await self._update_zone(zone_id) + + async def set_temperature_offset(self, device_id, offset): + """Set temperature offset of device.""" + try: + await self.hass.async_add_executor_job( + self._tado.set_temp_offset, device_id, offset + ) + except RequestException as err: + raise UpdateFailed(f"Error setting Tado temperature offset: {err}") from err + + async def set_meter_reading(self, reading: int) -> dict[str, Any]: + """Send meter reading to Tado.""" + dt: str = datetime.now().strftime("%Y-%m-%d") + if self._tado is None: + raise HomeAssistantError("Tado client is not initialized") + + try: + return await self.hass.async_add_executor_job( + self._tado.set_eiq_meter_readings, dt, reading + ) + except RequestException as err: + raise UpdateFailed(f"Error setting Tado meter reading: {err}") from err + + +class TadoMobileDeviceUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): + """Class to manage the mobile devices from Tado via PyTado.""" + + def __init__( + self, + hass: HomeAssistant, + entry: ConfigEntry, + tado: Tado, + ) -> None: + """Initialize the Tado data update coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_MOBILE_DEVICE_INTERVAL, + ) + self._tado = tado + self.data: dict[str, dict] = {} + + async def _async_update_data(self) -> dict[str, dict]: + """Fetch the latest data from Tado.""" + + try: + mobile_devices = await self.hass.async_add_executor_job( + self._tado.get_mobile_devices + ) + except RequestException as err: + _LOGGER.error("Error updating Tado mobile devices: %s", err) + raise UpdateFailed(f"Error updating Tado mobile devices: {err}") from err + + mapped_mobile_devices: dict[str, dict] = {} + for mobile_device in mobile_devices: + mobile_device_id = mobile_device["id"] + _LOGGER.debug("Updating mobile device %s", mobile_device_id) + try: + mapped_mobile_devices[mobile_device_id] = mobile_device + _LOGGER.debug( + "Mobile device %s updated, with data: %s", + mobile_device_id, + mobile_device, + ) + except RequestException: + _LOGGER.error( + "Unable to connect to Tado while updating mobile device %s", + mobile_device_id, + ) + + self.data["mobile_device"] = mapped_mobile_devices + return self.data diff --git a/homeassistant/components/tado/device_tracker.py b/homeassistant/components/tado/device_tracker.py index 95e031329c3..a9be560f434 100644 --- a/homeassistant/components/tado/device_tracker.py +++ b/homeassistant/components/tado/device_tracker.py @@ -11,12 +11,15 @@ from homeassistant.components.device_tracker import ( from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import 
async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) from . import TadoConfigEntry -from .const import DOMAIN, SIGNAL_TADO_MOBILE_DEVICE_UPDATE_RECEIVED -from .tado_connector import TadoConnector +from .const import DOMAIN +from .coordinator import TadoMobileDeviceUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,7 +31,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado device scannery entity.""" _LOGGER.debug("Setting up Tado device scanner entity") - tado = entry.runtime_data + tado = entry.runtime_data.mobile_coordinator tracked: set = set() # Fix non-string unique_id for device trackers @@ -49,58 +52,56 @@ async def async_setup_entry( update_devices() - entry.async_on_unload( - async_dispatcher_connect( - hass, - SIGNAL_TADO_MOBILE_DEVICE_UPDATE_RECEIVED.format(tado.home_id), - update_devices, - ) - ) - @callback def add_tracked_entities( hass: HomeAssistant, - tado: TadoConnector, + coordinator: TadoMobileDeviceUpdateCoordinator, async_add_entities: AddEntitiesCallback, tracked: set[str], ) -> None: """Add new tracker entities from Tado.""" _LOGGER.debug("Fetching Tado devices from API for (newly) tracked entities") new_tracked = [] - for device_key, device in tado.data["mobile_device"].items(): + for device_key, device in coordinator.data["mobile_device"].items(): if device_key in tracked: continue _LOGGER.debug( "Adding Tado device %s with deviceID %s", device["name"], device_key ) - new_tracked.append(TadoDeviceTrackerEntity(device_key, device["name"], tado)) + new_tracked.append( + TadoDeviceTrackerEntity(device_key, device["name"], coordinator) + ) tracked.add(device_key) async_add_entities(new_tracked) -class TadoDeviceTrackerEntity(TrackerEntity): +class TadoDeviceTrackerEntity(CoordinatorEntity[DataUpdateCoordinator], TrackerEntity): """A Tado Device Tracker entity.""" - _attr_should_poll = False _attr_available = False def __init__( self, device_id: str, device_name: str, - tado: TadoConnector, + coordinator: TadoMobileDeviceUpdateCoordinator, ) -> None: """Initialize a Tado Device Tracker entity.""" - super().__init__() + super().__init__(coordinator) self._attr_unique_id = str(device_id) self._device_id = device_id self._device_name = device_name - self._tado = tado self._active = False + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.update_state() + super()._handle_coordinator_update() + @callback def update_state(self) -> None: """Update the Tado device.""" @@ -109,7 +110,7 @@ class TadoDeviceTrackerEntity(TrackerEntity): self._device_name, self._device_id, ) - device = self._tado.data["mobile_device"][self._device_id] + device = self.coordinator.data["mobile_device"][self._device_id] self._attr_available = False _LOGGER.debug( @@ -129,25 +130,6 @@ class TadoDeviceTrackerEntity(TrackerEntity): else: _LOGGER.debug("Tado device %s is not at home", device["name"]) - @callback - def on_demand_update(self) -> None: - """Update state on demand.""" - self.update_state() - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Register state update callback.""" - _LOGGER.debug("Registering Tado device tracker entity") - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_MOBILE_DEVICE_UPDATE_RECEIVED.format(self._tado.home_id), - self.on_demand_update, - ) - ) - - self.update_state() - 
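In outline, the device tracker above now follows the standard CoordinatorEntity flow: the entity is coupled to its coordinator in `super().__init__`, every refresh lands in `_handle_coordinator_update`, which reads the relevant slice of `coordinator.data` and then defers to the base class to write state. A minimal sketch of that pattern, with placeholder names (not code from this PR):

from homeassistant.core import callback
from homeassistant.helpers.update_coordinator import CoordinatorEntity, DataUpdateCoordinator


class ExampleCoordinatedEntity(CoordinatorEntity[DataUpdateCoordinator]):
    """Minimal entity that mirrors one key of its coordinator's data."""

    def __init__(self, coordinator: DataUpdateCoordinator, key: str) -> None:
        # Couples this entity to the coordinator; listening starts once it is added to hass.
        super().__init__(coordinator)
        self._key = key

    @callback
    def _handle_coordinator_update(self) -> None:
        # Read only the slice of the shared data this entity cares about ...
        self._attr_available = self._key in (self.coordinator.data or {})
        # ... then let CoordinatorEntity schedule the state write.
        super()._handle_coordinator_update()
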
@property def name(self) -> str: """Return the name of the device.""" diff --git a/homeassistant/components/tado/entity.py b/homeassistant/components/tado/entity.py index 6bb90ab849a..971b2863aba 100644 --- a/homeassistant/components/tado/entity.py +++ b/homeassistant/components/tado/entity.py @@ -1,21 +1,30 @@ """Base class for Tado entity.""" +import logging + from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import TadoConnector from .const import DEFAULT_NAME, DOMAIN, TADO_HOME, TADO_ZONE +from .coordinator import TadoDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) -class TadoDeviceEntity(Entity): - """Base implementation for Tado device.""" +class TadoCoordinatorEntity(CoordinatorEntity[TadoDataUpdateCoordinator]): + """Base class for Tado entity.""" - _attr_should_poll = False _attr_has_entity_name = True - def __init__(self, device_info: dict[str, str]) -> None: + +class TadoDeviceEntity(TadoCoordinatorEntity): + """Base implementation for Tado device.""" + + def __init__( + self, device_info: dict[str, str], coordinator: TadoDataUpdateCoordinator + ) -> None: """Initialize a Tado device.""" - super().__init__() + super().__init__(coordinator) self._device_info = device_info self.device_name = device_info["serialNo"] self.device_id = device_info["shortSerialNo"] @@ -30,35 +39,35 @@ class TadoDeviceEntity(Entity): ) -class TadoHomeEntity(Entity): +class TadoHomeEntity(TadoCoordinatorEntity): """Base implementation for Tado home.""" - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__(self, tado: TadoConnector) -> None: + def __init__(self, coordinator: TadoDataUpdateCoordinator) -> None: """Initialize a Tado home.""" - super().__init__() - self.home_name = tado.home_name - self.home_id = tado.home_id + super().__init__(coordinator) + self.home_name = coordinator.home_name + self.home_id = coordinator.home_id self._attr_device_info = DeviceInfo( configuration_url="https://app.tado.com", - identifiers={(DOMAIN, str(tado.home_id))}, + identifiers={(DOMAIN, str(coordinator.home_id))}, manufacturer=DEFAULT_NAME, model=TADO_HOME, - name=tado.home_name, + name=coordinator.home_name, ) -class TadoZoneEntity(Entity): +class TadoZoneEntity(TadoCoordinatorEntity): """Base implementation for Tado zone.""" - _attr_has_entity_name = True - _attr_should_poll = False - - def __init__(self, zone_name: str, home_id: int, zone_id: int) -> None: + def __init__( + self, + zone_name: str, + home_id: int, + zone_id: int, + coordinator: TadoDataUpdateCoordinator, + ) -> None: """Initialize a Tado zone.""" - super().__init__() + super().__init__(coordinator) self.zone_name = zone_name self.zone_id = zone_id self._attr_device_info = DeviceInfo( diff --git a/homeassistant/components/tado/helper.py b/homeassistant/components/tado/helper.py index 558aee164d0..571a757a3e8 100644 --- a/homeassistant/components/tado/helper.py +++ b/homeassistant/components/tado/helper.py @@ -5,26 +5,27 @@ from .const import ( CONST_OVERLAY_TADO_MODE, CONST_OVERLAY_TIMER, ) -from .tado_connector import TadoConnector +from .coordinator import TadoDataUpdateCoordinator def decide_overlay_mode( - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, duration: int | None, zone_id: int, overlay_mode: str | None = None, ) -> str: """Return correct overlay mode based on the action and defaults.""" + # If user gave duration then overlay mode needs to be 
timer if duration: return CONST_OVERLAY_TIMER # If no duration or timer set to fallback setting if overlay_mode is None: - overlay_mode = tado.fallback or CONST_OVERLAY_TADO_MODE + overlay_mode = coordinator.fallback or CONST_OVERLAY_TADO_MODE # If default is Tado default then look it up if overlay_mode == CONST_OVERLAY_TADO_DEFAULT: overlay_mode = ( - tado.data["zone"][zone_id].default_overlay_termination_type + coordinator.data["zone"][zone_id].default_overlay_termination_type or CONST_OVERLAY_TADO_MODE ) @@ -32,18 +33,19 @@ def decide_overlay_mode( def decide_duration( - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, duration: int | None, zone_id: int, overlay_mode: str | None = None, ) -> None | int: """Return correct duration based on the selected overlay mode/duration and tado config.""" + # If we ended up with a timer but no duration, set a default duration # If we ended up with a timer but no duration, set a default duration if overlay_mode == CONST_OVERLAY_TIMER and duration is None: duration = ( - int(tado.data["zone"][zone_id].default_overlay_termination_duration) - if tado.data["zone"][zone_id].default_overlay_termination_duration + int(coordinator.data["zone"][zone_id].default_overlay_termination_duration) + if coordinator.data["zone"][zone_id].default_overlay_termination_duration is not None else 3600 ) @@ -53,6 +55,7 @@ def decide_duration( def generate_supported_fanmodes(tado_to_ha_mapping: dict[str, str], options: list[str]): """Return correct list of fan modes or None.""" + supported_fanmodes = [ tado_to_ha_mapping.get(option) for option in options diff --git a/homeassistant/components/tado/models.py b/homeassistant/components/tado/models.py new file mode 100644 index 00000000000..08bdaceaf03 --- /dev/null +++ b/homeassistant/components/tado/models.py @@ -0,0 +1,13 @@ +"""Models for use in Tado integration.""" + +from dataclasses import dataclass + +from .coordinator import TadoDataUpdateCoordinator, TadoMobileDeviceUpdateCoordinator + + +@dataclass +class TadoData: + """Class to hold Tado data.""" + + coordinator: TadoDataUpdateCoordinator + mobile_coordinator: TadoMobileDeviceUpdateCoordinator diff --git a/homeassistant/components/tado/sensor.py b/homeassistant/components/tado/sensor.py index 8bb13a02cd1..037b33574e7 100644 --- a/homeassistant/components/tado/sensor.py +++ b/homeassistant/components/tado/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -24,13 +23,12 @@ from .const import ( CONDITIONS_MAP, SENSOR_DATA_CATEGORY_GEOFENCE, SENSOR_DATA_CATEGORY_WEATHER, - SIGNAL_TADO_UPDATE_RECEIVED, TYPE_AIR_CONDITIONING, TYPE_HEATING, TYPE_HOT_WATER, ) +from .coordinator import TadoDataUpdateCoordinator from .entity import TadoHomeEntity, TadoZoneEntity -from .tado_connector import TadoConnector _LOGGER = logging.getLogger(__name__) @@ -197,7 +195,7 @@ async def async_setup_entry( ) -> None: """Set up the Tado sensor platform.""" - tado = entry.runtime_data + tado = entry.runtime_data.coordinator zones = tado.zones entities: list[SensorEntity] = [] @@ -232,39 +230,22 @@ class TadoHomeSensor(TadoHomeEntity, SensorEntity): entity_description: TadoSensorEntityDescription def __init__( - self, tado: TadoConnector, 
entity_description: TadoSensorEntityDescription + self, + coordinator: TadoDataUpdateCoordinator, + entity_description: TadoSensorEntityDescription, ) -> None: """Initialize of the Tado Sensor.""" self.entity_description = entity_description - super().__init__(tado) - self._tado = tado + super().__init__(coordinator) - self._attr_unique_id = f"{entity_description.key} {tado.home_id}" - - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format(self._tado.home_id, "home", "data"), - self._async_update_callback, - ) - ) - self._async_update_home_data() + self._attr_unique_id = f"{entity_description.key} {coordinator.home_id}" @callback - def _async_update_callback(self) -> None: - """Update and write state.""" - self._async_update_home_data() - self.async_write_ha_state() - - @callback - def _async_update_home_data(self) -> None: - """Handle update callbacks.""" + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" try: - tado_weather_data = self._tado.data["weather"] - tado_geofence_data = self._tado.data["geofence"] + tado_weather_data = self.coordinator.data["weather"] + tado_geofence_data = self.coordinator.data["geofence"] except KeyError: return @@ -278,6 +259,7 @@ class TadoHomeSensor(TadoHomeEntity, SensorEntity): self._attr_extra_state_attributes = self.entity_description.attributes_fn( tado_sensor_data ) + super()._handle_coordinator_update() class TadoZoneSensor(TadoZoneEntity, SensorEntity): @@ -287,43 +269,24 @@ class TadoZoneSensor(TadoZoneEntity, SensorEntity): def __init__( self, - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, zone_name: str, zone_id: int, entity_description: TadoSensorEntityDescription, ) -> None: """Initialize of the Tado Sensor.""" self.entity_description = entity_description - self._tado = tado - super().__init__(zone_name, tado.home_id, zone_id) + super().__init__(zone_name, coordinator.home_id, zone_id, coordinator) - self._attr_unique_id = f"{entity_description.key} {zone_id} {tado.home_id}" - - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self._tado.home_id, "zone", self.zone_id - ), - self._async_update_callback, - ) + self._attr_unique_id = ( + f"{entity_description.key} {zone_id} {coordinator.home_id}" ) - self._async_update_zone_data() @callback - def _async_update_callback(self) -> None: - """Update and write state.""" - self._async_update_zone_data() - self.async_write_ha_state() - - @callback - def _async_update_zone_data(self) -> None: - """Handle update callbacks.""" + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" try: - tado_zone_data = self._tado.data["zone"][self.zone_id] + tado_zone_data = self.coordinator.data["zone"][self.zone_id] except KeyError: return @@ -332,3 +295,4 @@ class TadoZoneSensor(TadoZoneEntity, SensorEntity): self._attr_extra_state_attributes = self.entity_description.attributes_fn( tado_zone_data ) + super()._handle_coordinator_update() diff --git a/homeassistant/components/tado/services.py b/homeassistant/components/tado/services.py index 89711808066..d931ea303e9 100644 --- a/homeassistant/components/tado/services.py +++ b/homeassistant/components/tado/services.py @@ -43,11 +43,8 @@ def setup_services(hass: HomeAssistant) -> None: if entry is 
None: raise ServiceValidationError("Config entry not found") - tadoconnector = entry.runtime_data - - response: dict = await hass.async_add_executor_job( - tadoconnector.set_meter_reading, call.data[CONF_READING] - ) + coordinator = entry.runtime_data.coordinator + response: dict = await coordinator.set_meter_reading(call.data[CONF_READING]) if ATTR_MESSAGE in response: raise HomeAssistantError(response[ATTR_MESSAGE]) diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index 8124570f9c9..735fe34bcf4 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -135,12 +135,12 @@ } }, "add_meter_reading": { - "name": "Add meter readings", - "description": "Add meter readings to Tado Energy IQ.", + "name": "Add meter reading", + "description": "Adds a meter reading to Tado Energy IQ.", "fields": { "config_entry": { "name": "Config Entry", - "description": "Config entry to add meter readings to." + "description": "Config entry to add meter reading to." }, "reading": { "name": "Reading", diff --git a/homeassistant/components/tado/tado_connector.py b/homeassistant/components/tado/tado_connector.py deleted file mode 100644 index 5ed53675153..00000000000 --- a/homeassistant/components/tado/tado_connector.py +++ /dev/null @@ -1,332 +0,0 @@ -"""Tado Connector a class to store the data as an object.""" - -from datetime import datetime, timedelta -import logging -from typing import Any - -from PyTado.interface import Tado -from requests import RequestException - -from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import dispatcher_send -from homeassistant.util import Throttle - -from .const import ( - INSIDE_TEMPERATURE_MEASUREMENT, - PRESET_AUTO, - SIGNAL_TADO_MOBILE_DEVICE_UPDATE_RECEIVED, - SIGNAL_TADO_UPDATE_RECEIVED, - TEMP_OFFSET, -) - -MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=4) -SCAN_INTERVAL = timedelta(minutes=5) -SCAN_MOBILE_DEVICE_INTERVAL = timedelta(seconds=30) - - -_LOGGER = logging.getLogger(__name__) - - -class TadoConnector: - """An object to store the Tado data.""" - - def __init__( - self, hass: HomeAssistant, username: str, password: str, fallback: str - ) -> None: - """Initialize Tado Connector.""" - self.hass = hass - self._username = username - self._password = password - self._fallback = fallback - - self.home_id: int = 0 - self.home_name = None - self.tado = None - self.zones: list[dict[Any, Any]] = [] - self.devices: list[dict[Any, Any]] = [] - self.data: dict[str, dict] = { - "device": {}, - "mobile_device": {}, - "weather": {}, - "geofence": {}, - "zone": {}, - } - - @property - def fallback(self): - """Return fallback flag to Smart Schedule.""" - return self._fallback - - def setup(self): - """Connect to Tado and fetch the zones.""" - self.tado = Tado(self._username, self._password) - # Load zones and devices - self.zones = self.tado.get_zones() - self.devices = self.tado.get_devices() - tado_home = self.tado.get_me()["homes"][0] - self.home_id = tado_home["id"] - self.home_name = tado_home["name"] - - def get_mobile_devices(self): - """Return the Tado mobile devices.""" - return self.tado.get_mobile_devices() - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def update(self): - """Update the registered zones.""" - self.update_devices() - self.update_mobile_devices() - self.update_zones() - self.update_home() - - def 
update_mobile_devices(self) -> None: - """Update the mobile devices.""" - try: - mobile_devices = self.get_mobile_devices() - except RuntimeError: - _LOGGER.error("Unable to connect to Tado while updating mobile devices") - return - - if not mobile_devices: - _LOGGER.debug("No linked mobile devices found for home ID %s", self.home_id) - return - - # Errors are planned to be converted to exceptions - # in PyTado library, so this can be removed - if isinstance(mobile_devices, dict) and mobile_devices.get("errors"): - _LOGGER.error( - "Error for home ID %s while updating mobile devices: %s", - self.home_id, - mobile_devices["errors"], - ) - return - - for mobile_device in mobile_devices: - self.data["mobile_device"][mobile_device["id"]] = mobile_device - _LOGGER.debug( - "Dispatching update to %s mobile device: %s", - self.home_id, - mobile_device, - ) - - dispatcher_send( - self.hass, - SIGNAL_TADO_MOBILE_DEVICE_UPDATE_RECEIVED.format(self.home_id), - ) - - def update_devices(self): - """Update the device data from Tado.""" - try: - devices = self.tado.get_devices() - except RuntimeError: - _LOGGER.error("Unable to connect to Tado while updating devices") - return - - if not devices: - _LOGGER.debug("No linked devices found for home ID %s", self.home_id) - return - - # Errors are planned to be converted to exceptions - # in PyTado library, so this can be removed - if isinstance(devices, dict) and devices.get("errors"): - _LOGGER.error( - "Error for home ID %s while updating devices: %s", - self.home_id, - devices["errors"], - ) - return - - for device in devices: - device_short_serial_no = device["shortSerialNo"] - _LOGGER.debug("Updating device %s", device_short_serial_no) - try: - if ( - INSIDE_TEMPERATURE_MEASUREMENT - in device["characteristics"]["capabilities"] - ): - device[TEMP_OFFSET] = self.tado.get_device_info( - device_short_serial_no, TEMP_OFFSET - ) - except RuntimeError: - _LOGGER.error( - "Unable to connect to Tado while updating device %s", - device_short_serial_no, - ) - return - - self.data["device"][device_short_serial_no] = device - - _LOGGER.debug( - "Dispatching update to %s device %s: %s", - self.home_id, - device_short_serial_no, - device, - ) - dispatcher_send( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self.home_id, "device", device_short_serial_no - ), - ) - - def update_zones(self): - """Update the zone data from Tado.""" - try: - zone_states = self.tado.get_zone_states()["zoneStates"] - except RuntimeError: - _LOGGER.error("Unable to connect to Tado while updating zones") - return - - for zone in zone_states: - self.update_zone(int(zone)) - - def update_zone(self, zone_id): - """Update the internal data from Tado.""" - _LOGGER.debug("Updating zone %s", zone_id) - try: - data = self.tado.get_zone_state(zone_id) - except RuntimeError: - _LOGGER.error("Unable to connect to Tado while updating zone %s", zone_id) - return - - self.data["zone"][zone_id] = data - - _LOGGER.debug( - "Dispatching update to %s zone %s: %s", - self.home_id, - zone_id, - data, - ) - dispatcher_send( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format(self.home_id, "zone", zone_id), - ) - - def update_home(self): - """Update the home data from Tado.""" - try: - self.data["weather"] = self.tado.get_weather() - self.data["geofence"] = self.tado.get_home_state() - dispatcher_send( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format(self.home_id, "home", "data"), - ) - except RuntimeError: - _LOGGER.error( - "Unable to connect to Tado while updating weather and geofence data" - ) - return - 
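Where the deleted TadoConnector relied on `@Throttle`, manual `update_*` methods and `dispatcher_send`, the coordinator introduced earlier in this diff centralizes polling: blocking PyTado calls are pushed to the executor and failures are raised as `UpdateFailed`, which marks dependent entities unavailable until the next successful refresh. A minimal sketch of that shape, where `client.fetch_all` is a stand-in for the real PyTado calls:

from datetime import timedelta
import logging

from requests import RequestException

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExamplePollingCoordinator(DataUpdateCoordinator[dict]):
    """Poll a blocking client on a fixed interval, off the event loop."""

    def __init__(self, hass: HomeAssistant, client) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="example",
            update_interval=timedelta(minutes=5),
        )
        self._client = client

    async def _async_update_data(self) -> dict:
        try:
            # The client does blocking I/O (requests), so run it in the executor.
            return await self.hass.async_add_executor_job(self._client.fetch_all)
        except RequestException as err:
            # Raising UpdateFailed keeps dependent entities unavailable until the next success.
            raise UpdateFailed(f"Error communicating with the API: {err}") from err
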
- def get_capabilities(self, zone_id): - """Return the capabilities of the devices.""" - return self.tado.get_capabilities(zone_id) - - def get_auto_geofencing_supported(self): - """Return whether the Tado Home supports auto geofencing.""" - return self.tado.get_auto_geofencing_supported() - - def reset_zone_overlay(self, zone_id): - """Reset the zone back to the default operation.""" - self.tado.reset_zone_overlay(zone_id) - self.update_zone(zone_id) - - def set_presence( - self, - presence=PRESET_HOME, - ): - """Set the presence to home, away or auto.""" - if presence == PRESET_AWAY: - self.tado.set_away() - elif presence == PRESET_HOME: - self.tado.set_home() - elif presence == PRESET_AUTO: - self.tado.set_auto() - - # Update everything when changing modes - self.update_zones() - self.update_home() - - def set_zone_overlay( - self, - zone_id=None, - overlay_mode=None, - temperature=None, - duration=None, - device_type="HEATING", - mode=None, - fan_speed=None, - swing=None, - fan_level=None, - vertical_swing=None, - horizontal_swing=None, - ): - """Set a zone overlay.""" - _LOGGER.debug( - ( - "Set overlay for zone %s: overlay_mode=%s, temp=%s, duration=%s," - " type=%s, mode=%s fan_speed=%s swing=%s fan_level=%s vertical_swing=%s horizontal_swing=%s" - ), - zone_id, - overlay_mode, - temperature, - duration, - device_type, - mode, - fan_speed, - swing, - fan_level, - vertical_swing, - horizontal_swing, - ) - - try: - self.tado.set_zone_overlay( - zone_id, - overlay_mode, - temperature, - duration, - device_type, - "ON", - mode, - fan_speed=fan_speed, - swing=swing, - fan_level=fan_level, - vertical_swing=vertical_swing, - horizontal_swing=horizontal_swing, - ) - - except RequestException as exc: - _LOGGER.error("Could not set zone overlay: %s", exc) - - self.update_zone(zone_id) - - def set_zone_off(self, zone_id, overlay_mode, device_type="HEATING"): - """Set a zone to off.""" - try: - self.tado.set_zone_overlay( - zone_id, overlay_mode, None, None, device_type, "OFF" - ) - except RequestException as exc: - _LOGGER.error("Could not set zone overlay: %s", exc) - - self.update_zone(zone_id) - - def set_temperature_offset(self, device_id, offset): - """Set temperature offset of device.""" - try: - self.tado.set_temp_offset(device_id, offset) - except RequestException as exc: - _LOGGER.error("Could not set temperature offset: %s", exc) - - def set_meter_reading(self, reading: int) -> dict[str, Any]: - """Send meter reading to Tado.""" - dt: str = datetime.now().strftime("%Y-%m-%d") - if self.tado is None: - raise HomeAssistantError("Tado client is not initialized") - - try: - return self.tado.set_eiq_meter_readings(date=dt, reading=reading) - except RequestException as exc: - raise HomeAssistantError("Could not set meter reading") from exc diff --git a/homeassistant/components/tado/water_heater.py b/homeassistant/components/tado/water_heater.py index 6c964cfaddd..02fbb3f5e23 100644 --- a/homeassistant/components/tado/water_heater.py +++ b/homeassistant/components/tado/water_heater.py @@ -12,7 +12,6 @@ from homeassistant.components.water_heater import ( from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_platform -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType @@ -26,13 +25,12 @@ from .const import ( 
CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_MODE, CONST_OVERLAY_TIMER, - SIGNAL_TADO_UPDATE_RECEIVED, TYPE_HOT_WATER, ) +from .coordinator import TadoDataUpdateCoordinator from .entity import TadoZoneEntity from .helper import decide_duration, decide_overlay_mode from .repairs import manage_water_heater_fallback_issue -from .tado_connector import TadoConnector _LOGGER = logging.getLogger(__name__) @@ -67,8 +65,9 @@ async def async_setup_entry( ) -> None: """Set up the Tado water heater platform.""" - tado = entry.runtime_data - entities = await hass.async_add_executor_job(_generate_entities, tado) + data = entry.runtime_data + coordinator = data.coordinator + entities = await _generate_entities(coordinator) platform = entity_platform.async_get_current_platform() @@ -83,27 +82,29 @@ async def async_setup_entry( manage_water_heater_fallback_issue( hass=hass, water_heater_names=[e.zone_name for e in entities], - integration_overlay_fallback=tado.fallback, + integration_overlay_fallback=coordinator.fallback, ) -def _generate_entities(tado: TadoConnector) -> list: +async def _generate_entities(coordinator: TadoDataUpdateCoordinator) -> list: """Create all water heater entities.""" entities = [] - for zone in tado.zones: + for zone in coordinator.zones: if zone["type"] == TYPE_HOT_WATER: - entity = create_water_heater_entity( - tado, zone["name"], zone["id"], str(zone["name"]) + entity = await create_water_heater_entity( + coordinator, zone["name"], zone["id"], str(zone["name"]) ) entities.append(entity) return entities -def create_water_heater_entity(tado: TadoConnector, name: str, zone_id: int, zone: str): +async def create_water_heater_entity( + coordinator: TadoDataUpdateCoordinator, name: str, zone_id: int, zone: str +): """Create a Tado water heater device.""" - capabilities = tado.get_capabilities(zone_id) + capabilities = await coordinator.get_capabilities(zone_id) supports_temperature_control = capabilities["canSetTemperature"] @@ -116,7 +117,7 @@ def create_water_heater_entity(tado: TadoConnector, name: str, zone_id: int, zon max_temp = None return TadoWaterHeater( - tado, + coordinator, name, zone_id, supports_temperature_control, @@ -134,7 +135,7 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): def __init__( self, - tado: TadoConnector, + coordinator: TadoDataUpdateCoordinator, zone_name: str, zone_id: int, supports_temperature_control: bool, @@ -142,11 +143,10 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): max_temp, ) -> None: """Initialize of Tado water heater entity.""" - self._tado = tado - super().__init__(zone_name, tado.home_id, zone_id) + super().__init__(zone_name, coordinator.home_id, zone_id, coordinator) self.zone_id = zone_id - self._attr_unique_id = f"{zone_id} {tado.home_id}" + self._attr_unique_id = f"{zone_id} {coordinator.home_id}" self._device_is_active = False @@ -164,19 +164,14 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): self._overlay_mode = CONST_MODE_SMART_SCHEDULE self._tado_zone_data: Any = None - async def async_added_to_hass(self) -> None: - """Register for sensor updates.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_TADO_UPDATE_RECEIVED.format( - self._tado.home_id, "zone", self.zone_id - ), - self._async_update_callback, - ) - ) self._async_update_data() + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._async_update_data() + super()._handle_coordinator_update() + @property def current_operation(self) -> str | None: 
"""Return current readable operation mode.""" @@ -202,7 +197,7 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): """Return the maximum temperature.""" return self._max_temperature - def set_operation_mode(self, operation_mode: str) -> None: + async def async_set_operation_mode(self, operation_mode: str) -> None: """Set new operation mode.""" mode = None @@ -213,18 +208,20 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): elif operation_mode == MODE_HEAT: mode = CONST_MODE_HEAT - self._control_heater(hvac_mode=mode) + await self._control_heater(hvac_mode=mode) + await self.coordinator.async_request_refresh() - def set_timer(self, time_period: int, temperature: float | None = None): + async def set_timer(self, time_period: int, temperature: float | None = None): """Set the timer on the entity, and temperature if supported.""" if not self._supports_temperature_control and temperature is not None: temperature = None - self._control_heater( + await self._control_heater( hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period ) + await self.coordinator.async_request_refresh() - def set_temperature(self, **kwargs: Any) -> None: + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if not self._supports_temperature_control or temperature is None: @@ -235,10 +232,11 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): CONST_MODE_AUTO, CONST_MODE_SMART_SCHEDULE, ): - self._control_heater(target_temp=temperature) + await self._control_heater(target_temp=temperature) return - self._control_heater(target_temp=temperature, hvac_mode=CONST_MODE_HEAT) + await self._control_heater(target_temp=temperature, hvac_mode=CONST_MODE_HEAT) + await self.coordinator.async_request_refresh() @callback def _async_update_callback(self) -> None: @@ -250,10 +248,10 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): def _async_update_data(self) -> None: """Load tado data.""" _LOGGER.debug("Updating water_heater platform for zone %d", self.zone_id) - self._tado_zone_data = self._tado.data["zone"][self.zone_id] + self._tado_zone_data = self.coordinator.data["zone"][self.zone_id] self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode - def _control_heater( + async def _control_heater( self, hvac_mode: str | None = None, target_temp: float | None = None, @@ -276,23 +274,26 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): self.zone_name, self.zone_id, ) - self._tado.reset_zone_overlay(self.zone_id) + await self.coordinator.reset_zone_overlay(self.zone_id) + await self.coordinator.async_request_refresh() return if self._current_tado_hvac_mode == CONST_MODE_OFF: _LOGGER.debug( "Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id ) - self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, TYPE_HOT_WATER) + await self.coordinator.set_zone_off( + self.zone_id, CONST_OVERLAY_MANUAL, TYPE_HOT_WATER + ) return overlay_mode = decide_overlay_mode( - tado=self._tado, + coordinator=self.coordinator, duration=duration, zone_id=self.zone_id, ) duration = decide_duration( - tado=self._tado, + coordinator=self.coordinator, duration=duration, zone_id=self.zone_id, overlay_mode=overlay_mode, @@ -304,7 +305,7 @@ class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity): self.zone_id, self._target_temp, ) - self._tado.set_zone_overlay( + await self.coordinator.set_zone_overlay( zone_id=self.zone_id, overlay_mode=overlay_mode, temperature=self._target_temp, diff 
--git a/homeassistant/components/tplink/binary_sensor.py b/homeassistant/components/tplink/binary_sensor.py index 6153ec31de1..e08495f5c88 100644 --- a/homeassistant/components/tplink/binary_sensor.py +++ b/homeassistant/components/tplink/binary_sensor.py @@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry -from .deprecate import async_cleanup_deprecated from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription @@ -88,12 +87,10 @@ async def async_setup_entry( feature_type=Feature.Type.BinarySensor, entity_class=TPLinkBinarySensorEntity, descriptions=BINARYSENSOR_DESCRIPTIONS_MAP, + platform_domain=BINARY_SENSOR_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated( - hass, BINARY_SENSOR_DOMAIN, config_entry.entry_id, entities - ) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index 990f0a608d3..0a4517b967d 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry -from .deprecate import DeprecatedInfo, async_cleanup_deprecated +from .deprecate import DeprecatedInfo from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription @@ -95,10 +95,10 @@ async def async_setup_entry( feature_type=Feature.Type.Action, entity_class=TPLinkButtonEntity, descriptions=BUTTON_DESCRIPTIONS_MAP, + platform_domain=BUTTON_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated(hass, BUTTON_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py index 61a08887f5f..b0f1f1a62c1 100644 --- a/homeassistant/components/tplink/camera.py +++ b/homeassistant/components/tplink/camera.py @@ -11,6 +11,7 @@ from kasa import Device, Module, StreamResolution from homeassistant.components import ffmpeg, stream from homeassistant.components.camera import ( + DOMAIN as CAMERA_DOMAIN, Camera, CameraEntityDescription, CameraEntityFeature, @@ -20,7 +21,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TPLinkConfigEntry, legacy_device_id +from . 
import TPLinkConfigEntry from .const import CONF_CAMERA_CREDENTIALS from .coordinator import TPLinkDataUpdateCoordinator from .entity import CoordinatedTPLinkModuleEntity, TPLinkModuleEntityDescription @@ -75,6 +76,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkCameraEntity, descriptions=CAMERA_DESCRIPTIONS, + platform_domain=CAMERA_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) @@ -121,10 +123,6 @@ class TPLinkCameraEntity(CoordinatedTPLinkModuleEntity, Camera): self._can_stream = True self._http_mpeg_stream_running = False - def _get_unique_id(self) -> str: - """Return unique ID for the entity.""" - return f"{legacy_device_id(self._device)}-{self.entity_description.key}" - async def async_added_to_hass(self) -> None: """Call update attributes after the device is added to the platform.""" await super().async_added_to_hass() diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index a7dd865e7bb..7204c2a7665 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -2,27 +2,29 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass import logging from typing import Any, cast -from kasa import Device +from kasa import Device, Module from kasa.smart.modules.temperaturecontrol import ThermostatState from homeassistant.components.climate import ( ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, ClimateEntity, ClimateEntityDescription, ClimateEntityFeature, HVACAction, HVACMode, ) -from homeassistant.const import PRECISION_TENTHS +from homeassistant.const import PRECISION_TENTHS, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TPLinkConfigEntry +from . import TPLinkConfigEntry, legacy_device_id from .const import DOMAIN, UNIT_MAPPING from .coordinator import TPLinkDataUpdateCoordinator from .entity import ( @@ -40,6 +42,7 @@ STATE_TO_ACTION = { ThermostatState.Idle: HVACAction.IDLE, ThermostatState.Heating: HVACAction.HEATING, ThermostatState.Off: HVACAction.OFF, + ThermostatState.Calibrating: HVACAction.IDLE, } @@ -52,11 +55,15 @@ class TPLinkClimateEntityDescription( ): """Base class for climate entity description.""" + unique_id_fn: Callable[[Device, TPLinkModuleEntityDescription], str] = ( + lambda device, desc: f"{legacy_device_id(device)}_{desc.key}" + ) + CLIMATE_DESCRIPTIONS: tuple[TPLinkClimateEntityDescription, ...] 
= ( TPLinkClimateEntityDescription( key="climate", - exists_fn=lambda dev, _: dev.device_type is Device.Type.Thermostat, + exists_fn=lambda dev, _: Module.Thermostat in dev.modules, ), ) @@ -81,6 +88,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkClimateEntity, descriptions=CLIMATE_DESCRIPTIONS, + platform_domain=CLIMATE_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) @@ -117,27 +125,42 @@ class TPLinkClimateEntity(CoordinatedTPLinkModuleEntity, ClimateEntity): ) -> None: """Initialize the climate entity.""" super().__init__(device, coordinator, description, parent=parent) - self._state_feature = device.features["state"] - self._mode_feature = device.features["thermostat_mode"] - self._temp_feature = device.features["temperature"] - self._target_feature = device.features["target_temperature"] + self._thermostat_module = device.modules[Module.Thermostat] - self._attr_min_temp = self._target_feature.minimum_value - self._attr_max_temp = self._target_feature.maximum_value - self._attr_temperature_unit = UNIT_MAPPING[cast(str, self._temp_feature.unit)] + if target_feature := self._thermostat_module.get_feature("target_temperature"): + self._attr_min_temp = target_feature.minimum_value + self._attr_max_temp = target_feature.maximum_value + else: + _LOGGER.error( + "Unable to get min/max target temperature for %s, using defaults", + device.host, + ) + + if temperature_feature := self._thermostat_module.get_feature("temperature"): + self._attr_temperature_unit = UNIT_MAPPING[ + cast(str, temperature_feature.unit) + ] + else: + _LOGGER.error( + "Unable to get correct temperature unit for %s, defaulting to celsius", + device.host, + ) + self._attr_temperature_unit = UnitOfTemperature.CELSIUS @async_refresh_after async def async_set_temperature(self, **kwargs: Any) -> None: """Set target temperature.""" - await self._target_feature.set_value(int(kwargs[ATTR_TEMPERATURE])) + await self._thermostat_module.set_target_temperature( + float(kwargs[ATTR_TEMPERATURE]) + ) @async_refresh_after async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set hvac mode (heat/off).""" if hvac_mode is HVACMode.HEAT: - await self._state_feature.set_value(True) + await self._thermostat_module.set_state(True) elif hvac_mode is HVACMode.OFF: - await self._state_feature.set_value(False) + await self._thermostat_module.set_state(False) else: raise ServiceValidationError( translation_domain=DOMAIN, @@ -150,39 +173,33 @@ class TPLinkClimateEntity(CoordinatedTPLinkModuleEntity, ClimateEntity): @async_refresh_after async def async_turn_on(self) -> None: """Turn heating on.""" - await self._state_feature.set_value(True) + await self._thermostat_module.set_state(True) @async_refresh_after async def async_turn_off(self) -> None: """Turn heating off.""" - await self._state_feature.set_value(False) + await self._thermostat_module.set_state(False) @callback def _async_update_attrs(self) -> bool: """Update the entity's attributes.""" - self._attr_current_temperature = cast(float | None, self._temp_feature.value) - self._attr_target_temperature = cast(float | None, self._target_feature.value) + self._attr_current_temperature = self._thermostat_module.temperature + self._attr_target_temperature = self._thermostat_module.target_temperature self._attr_hvac_mode = ( - HVACMode.HEAT if self._state_feature.value else HVACMode.OFF + HVACMode.HEAT if self._thermostat_module.state else HVACMode.OFF ) if ( - self._mode_feature.value not in STATE_TO_ACTION + 
self._thermostat_module.mode not in STATE_TO_ACTION and self._attr_hvac_action is not HVACAction.OFF ): _LOGGER.warning( "Unknown thermostat state, defaulting to OFF: %s", - self._mode_feature.value, + self._thermostat_module.mode, ) self._attr_hvac_action = HVACAction.OFF return True - self._attr_hvac_action = STATE_TO_ACTION[ - cast(ThermostatState, self._mode_feature.value) - ] + self._attr_hvac_action = STATE_TO_ACTION[self._thermostat_module.mode] return True - - def _get_unique_id(self) -> str: - """Return unique id.""" - return f"{self._device.device_id}_climate" diff --git a/homeassistant/components/tplink/deprecate.py b/homeassistant/components/tplink/deprecate.py index 738f3d24c38..86d4f66cdc0 100644 --- a/homeassistant/components/tplink/deprecate.py +++ b/homeassistant/components/tplink/deprecate.py @@ -6,16 +6,20 @@ from collections.abc import Sequence from dataclasses import dataclass from typing import TYPE_CHECKING +from kasa import Device + from homeassistant.components.automation import automations_with_entity +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.script import scripts_with_entity from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from . import legacy_device_id from .const import DOMAIN if TYPE_CHECKING: - from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription + from .entity import CoordinatedTPLinkEntity, TPLinkEntityDescription @dataclass(slots=True) @@ -30,7 +34,7 @@ class DeprecatedInfo: def async_check_create_deprecated( hass: HomeAssistant, unique_id: str, - entity_description: TPLinkFeatureEntityDescription, + entity_description: TPLinkEntityDescription, ) -> bool: """Return true if the entity should be created based on the deprecated_info. @@ -58,13 +62,21 @@ def async_check_create_deprecated( return not entity_entry.disabled -def async_cleanup_deprecated( +def async_process_deprecated( hass: HomeAssistant, - platform: str, + platform_domain: str, entry_id: str, - entities: Sequence[CoordinatedTPLinkFeatureEntity], + entities: Sequence[CoordinatedTPLinkEntity], + device: Device, ) -> None: - """Remove disabled deprecated entities or create issues if necessary.""" + """Process deprecated entities for a device. + + Create issues for deprecated entities that appear in automations. + Delete entities that are no longer provided by the integration either + because they have been removed at the end of the deprecation period, or + they are disabled by the user, so async_check_create_deprecated + returned false. + """ ent_reg = er.async_get(hass) for entity in entities: if not (deprecated_info := entity.entity_description.deprecated_info): @@ -72,7 +84,7 @@ def async_cleanup_deprecated( assert entity.unique_id entity_id = ent_reg.async_get_entity_id( - platform, + platform_domain, DOMAIN, entity.unique_id, ) @@ -94,17 +106,27 @@ def async_cleanup_deprecated( translation_placeholders={ "entity": entity_id, "info": item, - "platform": platform, + "platform": platform_domain, "new_platform": deprecated_info.new_platform, }, ) + # The light platform does not currently support cleaning up disabled + # deprecated entities because it uses two entity classes, so a completeness + # check is not possible. It also uses the mac address as device id in some + # instances instead of device_id.
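+ # Issue creation in the loop above still runs for deprecated light entities; + # only the stale-entity removal below is skipped for the light platform.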
+ if platform_domain == LIGHT_DOMAIN: + return + # Remove entities that are no longer provided and have been disabled. + device_id = legacy_device_id(device) + unique_ids = {entity.unique_id for entity in entities} for entity_entry in er.async_entries_for_config_entry(ent_reg, entry_id): if ( - entity_entry.domain == platform + entity_entry.domain == platform_domain and entity_entry.disabled + and entity_entry.unique_id.startswith(device_id) and entity_entry.unique_id not in unique_ids ): ent_reg.async_remove(entity_entry.entity_id) diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index e7c3600acc2..edef8bd83a0 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -36,7 +36,11 @@ from .const import ( PRIMARY_STATE_ID, ) from .coordinator import TPLinkConfigEntry, TPLinkDataUpdateCoordinator -from .deprecate import DeprecatedInfo, async_check_create_deprecated +from .deprecate import ( + DeprecatedInfo, + async_check_create_deprecated, + async_process_deprecated, +) _LOGGER = logging.getLogger(__name__) @@ -102,6 +106,9 @@ class TPLinkModuleEntityDescription(TPLinkEntityDescription): """Base class for a TPLink module based entity description.""" exists_fn: Callable[[Device, TPLinkConfigEntry], bool] + unique_id_fn: Callable[[Device, TPLinkModuleEntityDescription], str] = ( + lambda device, desc: f"{legacy_device_id(device)}-{desc.key}" + ) def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P]( @@ -151,6 +158,8 @@ class CoordinatedTPLinkEntity(CoordinatorEntity[TPLinkDataUpdateCoordinator], AB _attr_has_entity_name = True _device: Device + entity_description: TPLinkEntityDescription + def __init__( self, device: Device, @@ -235,7 +244,7 @@ class CoordinatedTPLinkEntity(CoordinatorEntity[TPLinkDataUpdateCoordinator], AB def _get_unique_id(self) -> str: """Return unique ID for the entity.""" - return legacy_device_id(self._device) + raise NotImplementedError async def async_added_to_hass(self) -> None: """Call update attributes after the device is added to the platform.""" @@ -405,6 +414,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): feature_type: Feature.Type, entity_class: type[_E], descriptions: Mapping[str, _D], + platform_domain: str, parent: Device | None = None, ) -> list[_E]: """Return a list of entities to add. 
@@ -439,6 +449,9 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): desc, ) ] + async_process_deprecated( + hass, platform_domain, coordinator.config_entry.entry_id, entities, device + ) return entities @classmethod @@ -454,6 +467,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): feature_type: Feature.Type, entity_class: type[_E], descriptions: Mapping[str, _D], + platform_domain: str, known_child_device_ids: set[str], first_check: bool, ) -> list[_E]: @@ -473,6 +487,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): feature_type=feature_type, entity_class=entity_class, descriptions=descriptions, + platform_domain=platform_domain, ) ) @@ -498,6 +513,7 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC): feature_type=feature_type, entity_class=entity_class, descriptions=descriptions, + platform_domain=platform_domain, parent=device, ) _LOGGER.debug( @@ -539,6 +555,11 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): else: self._attr_name = get_device_name(device) + def _get_unique_id(self) -> str: + """Return unique ID for the entity.""" + desc = self.entity_description + return desc.unique_id_fn(self._device, desc) + @classmethod def _entities_for_device[ _E: CoordinatedTPLinkModuleEntity, @@ -551,6 +572,7 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): *, entity_class: type[_E], descriptions: Iterable[_D], + platform_domain: str, parent: Device | None = None, ) -> list[_E]: """Return a list of entities to add.""" @@ -563,7 +585,15 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): ) for description in descriptions if description.exists_fn(device, coordinator.config_entry) + and async_check_create_deprecated( + hass, + description.unique_id_fn(device, description), + description, + ) ] + async_process_deprecated( + hass, platform_domain, coordinator.config_entry.entry_id, entities, device + ) return entities @classmethod @@ -578,6 +608,7 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): *, entity_class: type[_E], descriptions: Iterable[_D], + platform_domain: str, known_child_device_ids: set[str], first_check: bool, ) -> list[_E]: @@ -597,6 +628,7 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): coordinator=coordinator, entity_class=entity_class, descriptions=descriptions, + platform_domain=platform_domain, ) ) has_parent_entities = bool(entities) @@ -621,6 +653,7 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC): coordinator=child_coordinator, entity_class=entity_class, descriptions=descriptions, + platform_domain=platform_domain, parent=device, ) _LOGGER.debug( diff --git a/homeassistant/components/tplink/fan.py b/homeassistant/components/tplink/fan.py index cb17955fbcb..1c31d84b778 100644 --- a/homeassistant/components/tplink/fan.py +++ b/homeassistant/components/tplink/fan.py @@ -1,5 +1,6 @@ """Support for TPLink Fan devices.""" +from collections.abc import Callable from dataclasses import dataclass import logging import math @@ -8,6 +9,7 @@ from typing import Any from kasa import Device, Module from homeassistant.components.fan import ( + DOMAIN as FAN_DOMAIN, FanEntity, FanEntityDescription, FanEntityFeature, @@ -20,7 +22,7 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range -from . import TPLinkConfigEntry +from . 
import TPLinkConfigEntry, legacy_device_id from .coordinator import TPLinkDataUpdateCoordinator from .entity import ( CoordinatedTPLinkModuleEntity, @@ -39,6 +41,12 @@ _LOGGER = logging.getLogger(__name__) class TPLinkFanEntityDescription(FanEntityDescription, TPLinkModuleEntityDescription): """Base class for fan entity description.""" + unique_id_fn: Callable[[Device, TPLinkModuleEntityDescription], str] = ( + lambda device, desc: legacy_device_id(device) + if desc.key == "fan" + else f"{legacy_device_id(device)}-{desc.key}" + ) + FAN_DESCRIPTIONS: tuple[TPLinkFanEntityDescription, ...] = ( TPLinkFanEntityDescription( @@ -68,6 +76,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkFanEntity, descriptions=FAN_DESCRIPTIONS, + platform_domain=FAN_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index aedbccfbd51..e00e8f69467 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -145,9 +145,6 @@ "temperature_offset": { "default": "mdi:contrast" }, - "target_temperature": { - "default": "mdi:thermometer" - }, "pan_step": { "default": "mdi:unfold-more-vertical" }, diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index bc4d792b3f8..c1311c256df 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -18,6 +18,7 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, + DOMAIN as LIGHT_DOMAIN, EFFECT_OFF, ColorMode, LightEntity, @@ -141,12 +142,39 @@ def _async_build_base_effect( } +def _get_backwards_compatible_light_unique_id( + device: Device, entity_description: TPLinkModuleEntityDescription +) -> str: + """Return unique ID for the entity.""" + # For historical reasons the light platform uses the mac address as + # the unique id whereas all other platforms use device_id. + + # For backwards compat with pyHS100 + if device.device_type is DeviceType.Dimmer and isinstance(device, IotDevice): + # Dimmers used to use the switch format since + # pyHS100 treated them as SmartPlug but the old code + # created them as lights + # https://github.com/home-assistant/core/blob/2021.9.7/ \ + # homeassistant/components/tplink/common.py#L86 + return legacy_device_id(device) + + # Newer devices can have child lights. While there isn't currently + # an example of a device with more than one light we use the device_id + # for consistency and future proofing + if device.parent or device.children: + return legacy_device_id(device) + + return device.mac.replace(":", "").upper() + + @dataclass(frozen=True, kw_only=True) class TPLinkLightEntityDescription( LightEntityDescription, TPLinkModuleEntityDescription ): """Base class for tplink light entity description.""" + unique_id_fn = _get_backwards_compatible_light_unique_id + LIGHT_DESCRIPTIONS: tuple[TPLinkLightEntityDescription, ...] 
= ( TPLinkLightEntityDescription( @@ -186,6 +214,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkLightEntity, descriptions=LIGHT_DESCRIPTIONS, + platform_domain=LIGHT_DOMAIN, known_child_device_ids=known_child_device_ids_light, first_check=first_check, ) @@ -196,6 +225,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkLightEffectEntity, descriptions=LIGHT_EFFECT_DESCRIPTIONS, + platform_domain=LIGHT_DOMAIN, known_child_device_ids=known_child_device_ids_light_effect, first_check=first_check, ) @@ -242,29 +272,6 @@ class TPLinkLightEntity(CoordinatedTPLinkModuleEntity, LightEntity): # If the light supports only a single color mode, set it now self._fixed_color_mode = next(iter(self._attr_supported_color_modes)) - def _get_unique_id(self) -> str: - """Return unique ID for the entity.""" - # For historical reasons the light platform uses the mac address as - # the unique id whereas all other platforms use device_id. - device = self._device - - # For backwards compat with pyHS100 - if device.device_type is DeviceType.Dimmer and isinstance(device, IotDevice): - # Dimmers used to use the switch format since - # pyHS100 treated them as SmartPlug but the old code - # created them as lights - # https://github.com/home-assistant/core/blob/2021.9.7/ \ - # homeassistant/components/tplink/common.py#L86 - return legacy_device_id(device) - - # Newer devices can have child lights. While there isn't currently - # an example of a device with more than one light we use the device_id - # for consistency and future proofing - if self._parent or device.children: - return legacy_device_id(device) - - return device.mac.replace(":", "").upper() - @callback def _async_extract_brightness_transition( self, **kwargs: Any diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index 97152ef4da8..0af2b7403e8 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -18,7 +18,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry -from .deprecate import async_cleanup_deprecated from .entity import ( CoordinatedTPLinkFeatureEntity, TPLinkDataUpdateCoordinator, @@ -91,10 +90,10 @@ async def async_setup_entry( feature_type=Feature.Type.Number, entity_class=TPLinkNumberEntity, descriptions=NUMBER_DESCRIPTIONS_MAP, + platform_domain=NUMBER_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated(hass, NUMBER_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/tplink/select.py b/homeassistant/components/tplink/select.py index a443546fdaa..8e9dee7b964 100644 --- a/homeassistant/components/tplink/select.py +++ b/homeassistant/components/tplink/select.py @@ -16,7 +16,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TPLinkConfigEntry -from .deprecate import async_cleanup_deprecated from .entity import ( CoordinatedTPLinkFeatureEntity, TPLinkDataUpdateCoordinator, @@ -71,10 +70,10 @@ async def async_setup_entry( feature_type=Feature.Type.Choice, entity_class=TPLinkSelectEntity, descriptions=SELECT_DESCRIPTIONS_MAP, + platform_domain=SELECT_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated(hass, SELECT_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 0898a3379d1..aaba6b2674d 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -19,7 +19,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TPLinkConfigEntry from .const import UNIT_MAPPING -from .deprecate import async_cleanup_deprecated from .entity import CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription @@ -141,10 +140,10 @@ async def async_setup_entry( feature_type=Feature.Type.Sensor, entity_class=TPLinkSensorEntity, descriptions=SENSOR_DESCRIPTIONS_MAP, + platform_domain=SENSOR_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated(hass, SENSOR_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/tplink/siren.py b/homeassistant/components/tplink/siren.py index 0c15477ee78..5931a508d6c 100644 --- a/homeassistant/components/tplink/siren.py +++ b/homeassistant/components/tplink/siren.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from typing import Any @@ -9,6 +10,7 @@ from kasa import Device, Module from kasa.smart.modules.alarm import Alarm from homeassistant.components.siren import ( + DOMAIN as SIREN_DOMAIN, SirenEntity, SirenEntityDescription, SirenEntityFeature, @@ -16,7 +18,7 @@ from homeassistant.components.siren import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TPLinkConfigEntry +from . import TPLinkConfigEntry, legacy_device_id from .coordinator import TPLinkDataUpdateCoordinator from .entity import ( CoordinatedTPLinkModuleEntity, @@ -35,6 +37,12 @@ class TPLinkSirenEntityDescription( ): """Base class for siren entity description.""" + unique_id_fn: Callable[[Device, TPLinkModuleEntityDescription], str] = ( + lambda device, desc: legacy_device_id(device) + if desc.key == "siren" + else f"{legacy_device_id(device)}-{desc.key}" + ) + SIREN_DESCRIPTIONS: tuple[TPLinkSirenEntityDescription, ...] = ( TPLinkSirenEntityDescription( @@ -64,6 +72,7 @@ async def async_setup_entry( coordinator=parent_coordinator, entity_class=TPLinkSirenEntity, descriptions=SIREN_DESCRIPTIONS, + platform_domain=SIREN_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 92ecd7992de..04ca95273af 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TPLinkConfigEntry -from .deprecate import async_cleanup_deprecated from .entity import ( CoordinatedTPLinkFeatureEntity, TPLinkFeatureEntityDescription, @@ -100,10 +99,10 @@ async def async_setup_entry( feature_type=Feature.Switch, entity_class=TPLinkSwitch, descriptions=SWITCH_DESCRIPTIONS_MAP, + platform_domain=SWITCH_DOMAIN, known_child_device_ids=known_child_device_ids, first_check=first_check, ) - async_cleanup_deprecated(hass, SWITCH_DOMAIN, config_entry.entry_id, entities) async_add_entities(entities) _check_device() diff --git a/homeassistant/components/twitch/coordinator.py b/homeassistant/components/twitch/coordinator.py index c61e80bd2b8..010a9e90ccc 100644 --- a/homeassistant/components/twitch/coordinator.py +++ b/homeassistant/components/twitch/coordinator.py @@ -122,7 +122,7 @@ class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): stream.game_name if stream else None, stream.title if stream else None, stream.started_at if stream else None, - stream.thumbnail_url if stream else None, + stream.thumbnail_url.format(width="", height="") if stream else None, channel.profile_image_url, bool(sub), sub.is_gift if sub else None, diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 018a600f037..69c7f8b205b 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==7.4.1", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==7.5.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index 0ca103300da..df4daa8782c 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.5"], + "requirements": ["async-upnp-client==0.43.0", "getmac==0.9.5"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/vesync/humidifier.py b/homeassistant/components/vesync/humidifier.py index 9c54afdfb82..3d89d5dc6db 100644 --- a/homeassistant/components/vesync/humidifier.py +++ b/homeassistant/components/vesync/humidifier.py @@ -6,7 +6,6 @@ from typing import Any from pyvesync.vesyncbasedevice import VeSyncBaseDevice from homeassistant.components.humidifier import ( - ATTR_HUMIDITY, MODE_AUTO, MODE_NORMAL, MODE_SLEEP, @@ -40,8 +39,6 @@ _LOGGER = logging.getLogger(__name__) MIN_HUMIDITY = 30 MAX_HUMIDITY = 80 -VS_TO_HA_ATTRIBUTES = {ATTR_HUMIDITY: "current_humidity"} - VS_TO_HA_MODE_MAP = { VS_HUMIDIFIER_MODE_AUTO: MODE_AUTO, VS_HUMIDIFIER_MODE_HUMIDITY: MODE_AUTO, @@ -49,8 +46,6 @@ VS_TO_HA_MODE_MAP = { VS_HUMIDIFIER_MODE_SLEEP: MODE_SLEEP, } -HA_TO_VS_MODE_MAP = {v: k for k, v in VS_TO_HA_MODE_MAP.items()} - async def async_setup_entry( hass: HomeAssistant, @@ -92,10 +87,6 @@ def _get_ha_mode(vs_mode: str) -> str | None: return ha_mode -def _get_vs_mode(ha_mode: str) -> str | None: - return HA_TO_VS_MODE_MAP.get(ha_mode) - - class VeSyncHumidifierHA(VeSyncBaseEntity, HumidifierEntity): """Representation of a VeSync humidifier.""" @@ -108,14 +99,35 @@ class VeSyncHumidifierHA(VeSyncBaseEntity, 
HumidifierEntity): device: VeSyncHumidifierDevice + def __init__( + self, + device: VeSyncBaseDevice, + coordinator: VeSyncDataCoordinator, + ) -> None: + """Initialize the VeSyncHumidifierHA device.""" + super().__init__(device, coordinator) + + # 2 Vesync humidifier modes (humidity and auto) map to the HA mode auto. + # They are on different devices though. We need to map HA mode to the + # device specific mode when setting it. + + self._ha_to_vs_mode_map: dict[str, str] = {} + self._available_modes: list[str] = [] + + # Populate maps once. + for vs_mode in self.device.mist_modes: + ha_mode = _get_ha_mode(vs_mode) + if ha_mode: + self._available_modes.append(ha_mode) + self._ha_to_vs_mode_map[ha_mode] = vs_mode + + def _get_vs_mode(self, ha_mode: str) -> str | None: + return self._ha_to_vs_mode_map.get(ha_mode) + @property def available_modes(self) -> list[str]: """Return the available mist modes.""" - return [ - ha_mode - for ha_mode in (_get_ha_mode(vs_mode) for vs_mode in self.device.mist_modes) - if ha_mode - ] + return self._available_modes @property def target_humidity(self) -> int: @@ -140,9 +152,15 @@ class VeSyncHumidifierHA(VeSyncBaseEntity, HumidifierEntity): raise HomeAssistantError( "{mode} is not one of the valid available modes: {self.available_modes}" ) - if not self.device.set_humidity_mode(_get_vs_mode(mode)): + if not self.device.set_humidity_mode(self._get_vs_mode(mode)): raise HomeAssistantError(f"An error occurred while setting mode {mode}.") + # Changing mode while humidifier is off actually turns it on, as per the app. But + # the library does not seem to update the device_status. It is also possible that + # other attributes get updated. Scheduling a forced refresh to get the device status + # updated. + self.schedule_update_ha_state(force_refresh=True) + def turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" success = self.device.turn_on() diff --git a/homeassistant/components/vesync/manifest.json b/homeassistant/components/vesync/manifest.json index 81fb1a764f0..cdb5ed96652 100644 --- a/homeassistant/components/vesync/manifest.json +++ b/homeassistant/components/vesync/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/vesync", "iot_class": "cloud_polling", "loggers": ["pyvesync"], - "requirements": ["pyvesync==2.1.15"] + "requirements": ["pyvesync==2.1.16"] } diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 98ff6ce4c82..766cf22cb94 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.39.1"] + "requirements": ["PyViCare==2.41.0"] } diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index ba0191c5cd2..14624be2b6d 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -30,6 +30,7 @@ from homeassistant.const import ( EntityCategory, UnitOfEnergy, UnitOfPower, + UnitOfPressure, UnitOfTemperature, UnitOfTime, UnitOfVolume, @@ -836,6 +837,31 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...]
= ( "forcedlevelfour", ], ), + ViCareSensorEntityDescription( + key="supply_pressure", + translation_key="supply_pressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.BAR, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + value_getter=lambda api: api.getSupplyPressure(), + unit_getter=lambda api: api.getSupplyPressureUnit(), + ), + ViCareSensorEntityDescription( + key="heating_rod_starts", + translation_key="heating_rod_starts", + value_getter=lambda api: api.getHeatingRodStarts(), + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + ViCareSensorEntityDescription( + key="heating_rod_hours", + translation_key="heating_rod_hours", + native_unit_of_measurement=UnitOfTime.HOURS, + value_getter=lambda api: api.getHeatingRodHours(), + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.TOTAL_INCREASING, + ), ) CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 074c994d4a5..5ab92880ba0 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -455,6 +455,15 @@ "silent": "Silent", "forcedlevelfour": "Boost" } + }, + "supply_pressure": { + "name": "Supply pressure" + }, + "heating_rod_starts": { + "name": "Heating rod starts" + }, + "heating_rod_hours": { + "name": "Heating rod hours" } }, "water_heater": { diff --git a/homeassistant/components/vizio/strings.json b/homeassistant/components/vizio/strings.json index 6091cd72f3f..2f97bb332e8 100644 --- a/homeassistant/components/vizio/strings.json +++ b/homeassistant/components/vizio/strings.json @@ -6,7 +6,7 @@ "data": { "name": "[%key:common::config_flow::data::name%]", "host": "[%key:common::config_flow::data::host%]", - "device_class": "Device Type", + "device_class": "Device type", "access_token": "[%key:common::config_flow::data::access_token%]" }, "data_description": { @@ -14,25 +14,25 @@ } }, "pair_tv": { - "title": "Complete Pairing Process", + "title": "Complete pairing process", "description": "Your TV should be displaying a code. Enter that code into the form and then continue to the next step to complete the pairing.", "data": { "pin": "[%key:common::config_flow::data::pin%]" } }, "pairing_complete": { - "title": "Pairing Complete", - "description": "Your VIZIO SmartCast Device is now connected to Home Assistant." + "title": "Pairing complete", + "description": "Your VIZIO SmartCast device is now connected to Home Assistant." }, "pairing_complete_import": { "title": "[%key:component::vizio::config::step::pairing_complete::title%]", - "description": "Your VIZIO SmartCast Device is now connected to Home Assistant.\n\nYour access token is '**{access_token}**'." + "description": "Your VIZIO SmartCast device is now connected to Home Assistant.\n\nYour access token is '**{access_token}**'." } }, "error": { "complete_pairing_failed": "Unable to complete pairing. Ensure the PIN you provided is correct and the TV is still powered and connected to the network before resubmitting.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "existing_config_entry_found": "An existing VIZIO SmartCast Device config entry with the same serial number has already been configured. You must delete the existing entry in order to configure this one." 
+ "existing_config_entry_found": "An existing VIZIO SmartCast device config entry with the same serial number has already been configured. You must delete the existing entry in order to configure this one." }, "abort": { "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]", @@ -43,12 +43,12 @@ "options": { "step": { "init": { - "title": "Update VIZIO SmartCast Device Options", + "title": "Update VIZIO SmartCast device options", "description": "If you have a Smart TV, you can optionally filter your source list by choosing which apps to include or exclude in your source list.", "data": { - "volume_step": "Volume Step Size", - "include_or_exclude": "Include or Exclude Apps?", - "apps_to_include_or_exclude": "Apps to Include or Exclude" + "volume_step": "Volume step size", + "include_or_exclude": "Include or exclude apps?", + "apps_to_include_or_exclude": "Apps to include or exclude" } } } diff --git a/homeassistant/components/webostv/__init__.py b/homeassistant/components/webostv/__init__.py index 3a3ee8e4c7e..c1a1c698f92 100644 --- a/homeassistant/components/webostv/__init__.py +++ b/homeassistant/components/webostv/__init__.py @@ -1,14 +1,12 @@ -"""Support for LG webOS Smart TV.""" +"""The LG webOS TV integration.""" from __future__ import annotations from contextlib import suppress -import logging from aiowebostv import WebOsClient, WebOsTvPairError from homeassistant.components import notify as hass_notify -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_CLIENT_SECRET, CONF_HOST, @@ -19,6 +17,7 @@ from homeassistant.const import ( from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import config_validation as cv, discovery +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import ( @@ -28,17 +27,13 @@ from .const import ( PLATFORMS, WEBOSTV_EXCEPTIONS, ) +from .helpers import WebOsTvConfigEntry, update_client_key CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -_LOGGER = logging.getLogger(__name__) - -type WebOsTvConfigEntry = ConfigEntry[WebOsClient] - - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the LG WebOS TV platform.""" + """Set up the LG webOS TV platform.""" hass.data.setdefault(DOMAIN, {DATA_HASS_CONFIG: config}) return True @@ -50,7 +45,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: WebOsTvConfigEntry) -> b key = entry.data[CONF_CLIENT_SECRET] # Attempt a connection, but fail gracefully if tv is off for example. 
- entry.runtime_data = client = WebOsClient(host, key) + entry.runtime_data = client = WebOsClient( + host, key, client_session=async_get_clientsession(hass) + ) with suppress(*WEBOSTV_EXCEPTIONS): try: await client.connect() @@ -59,7 +56,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: WebOsTvConfigEntry) -> b # If pairing request accepted there will be no error # Update the stored key without triggering reauth - update_client_key(hass, entry, client) + update_client_key(hass, entry) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -96,31 +93,6 @@ async def async_update_options(hass: HomeAssistant, entry: WebOsTvConfigEntry) - await hass.config_entries.async_reload(entry.entry_id) -async def async_control_connect(host: str, key: str | None) -> WebOsClient: - """LG Connection.""" - client = WebOsClient(host, key) - try: - await client.connect() - except WebOsTvPairError: - _LOGGER.warning("Connected to LG webOS TV %s but not paired", host) - raise - - return client - - -def update_client_key( - hass: HomeAssistant, entry: ConfigEntry, client: WebOsClient -) -> None: - """Check and update stored client key if key has changed.""" - host = entry.data[CONF_HOST] - key = entry.data[CONF_CLIENT_SECRET] - - if client.client_key != key: - _LOGGER.debug("Updating client key for host %s", host) - data = {CONF_HOST: host, CONF_CLIENT_SECRET: client.client_key} - hass.config_entries.async_update_entry(entry, data=data) - - async def async_unload_entry(hass: HomeAssistant, entry: WebOsTvConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): diff --git a/homeassistant/components/webostv/config_flow.py b/homeassistant/components/webostv/config_flow.py index 6086fad8afd..fbc3eb958dd 100644 --- a/homeassistant/components/webostv/config_flow.py +++ b/homeassistant/components/webostv/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow to configure webostv component.""" +"""Config flow for LG webOS TV integration.""" from __future__ import annotations @@ -6,22 +6,23 @@ from collections.abc import Mapping from typing import Any, Self from urllib.parse import urlparse -from aiowebostv import WebOsTvPairError +from aiowebostv import WebOsClient, WebOsTvPairError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.service_info.ssdp import ( ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN, SsdpServiceInfo, ) -from . import WebOsTvConfigEntry, async_control_connect +from . 
import WebOsTvConfigEntry from .const import CONF_SOURCES, DEFAULT_NAME, DOMAIN, WEBOSTV_EXCEPTIONS -from .helpers import async_get_sources +from .helpers import get_sources DATA_SCHEMA = vol.Schema( { @@ -31,8 +32,23 @@ DATA_SCHEMA = vol.Schema( ) +async def async_control_connect( + hass: HomeAssistant, host: str, key: str | None +) -> WebOsClient: + """Create LG webOS client and connect to the TV.""" + client = WebOsClient( + host, + key, + client_session=async_get_clientsession(hass), + ) + + await client.connect() + + return client + + class FlowHandler(ConfigFlow, domain=DOMAIN): - """WebosTV configuration flow.""" + """LG webOS TV configuration flow.""" VERSION = 1 @@ -69,7 +85,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - client = await async_control_connect(self._host, None) + client = await async_control_connect(self.hass, self._host, None) except WebOsTvPairError: errors["base"] = "error_pairing" except WEBOSTV_EXCEPTIONS: @@ -130,7 +146,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - client = await async_control_connect(self._host, None) + client = await async_control_connect(self.hass, self._host, None) except WebOsTvPairError: errors["base"] = "error_pairing" except WEBOSTV_EXCEPTIONS: @@ -154,7 +170,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): client_key = reconfigure_entry.data.get(CONF_CLIENT_SECRET) try: - client = await async_control_connect(host, client_key) + client = await async_control_connect(self.hass, host, client_key) except WebOsTvPairError: errors["base"] = "error_pairing" except WEBOSTV_EXCEPTIONS: @@ -195,9 +211,14 @@ class OptionsFlowHandler(OptionsFlow): options_input = {CONF_SOURCES: user_input[CONF_SOURCES]} return self.async_create_entry(title="", data=options_input) # Get sources - sources_list = await async_get_sources(self.host, self.key) - if not sources_list: - errors["base"] = "cannot_retrieve" + sources_list = [] + try: + client = await async_control_connect(self.hass, self.host, self.key) + sources_list = get_sources(client) + except WebOsTvPairError: + errors["base"] = "error_pairing" + except WEBOSTV_EXCEPTIONS: + errors["base"] = "cannot_connect" option_sources = self.config_entry.options.get(CONF_SOURCES, []) sources = [s for s in option_sources if s in sources_list] diff --git a/homeassistant/components/webostv/const.py b/homeassistant/components/webostv/const.py index 65d964d8fd4..e505611db52 100644 --- a/homeassistant/components/webostv/const.py +++ b/homeassistant/components/webostv/const.py @@ -1,9 +1,9 @@ -"""Constants used for LG webOS Smart TV.""" +"""Constants for the LG webOS TV integration.""" import asyncio +import aiohttp from aiowebostv import WebOsTvCommandError -from websockets.exceptions import ConnectionClosed, ConnectionClosedOK from homeassistant.const import Platform @@ -27,11 +27,10 @@ SERVICE_SELECT_SOUND_OUTPUT = "select_sound_output" LIVE_TV_APP_ID = "com.webos.app.livetv" WEBOSTV_EXCEPTIONS = ( - OSError, - ConnectionClosed, - ConnectionClosedOK, - ConnectionRefusedError, + ConnectionResetError, WebOsTvCommandError, - TimeoutError, + aiohttp.ClientConnectorError, + aiohttp.ServerDisconnectedError, asyncio.CancelledError, + asyncio.TimeoutError, ) diff --git a/homeassistant/components/webostv/device_trigger.py b/homeassistant/components/webostv/device_trigger.py index 877c607f939..951c11525b1 100644 --- a/homeassistant/components/webostv/device_trigger.py +++ b/homeassistant/components/webostv/device_trigger.py @@ -1,4 +1,4 @@ 
-"""Provides device automations for control of LG webOS Smart TV.""" +"""Provides device automations for control of LG webOS TV.""" from __future__ import annotations @@ -14,7 +14,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import trigger +from . import DOMAIN, trigger from .helpers import ( async_get_client_by_device_entry, async_get_device_entry_by_device_id, @@ -75,4 +75,8 @@ async def async_attach_trigger( hass, trigger_config, action, trigger_info ) - raise HomeAssistantError(f"Unhandled trigger type {trigger_type}") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unhandled_trigger_type", + translation_placeholders={"trigger_type": trigger_type}, + ) diff --git a/homeassistant/components/webostv/diagnostics.py b/homeassistant/components/webostv/diagnostics.py index d5e2dac06dc..7fb64a2cb8f 100644 --- a/homeassistant/components/webostv/diagnostics.py +++ b/homeassistant/components/webostv/diagnostics.py @@ -1,4 +1,4 @@ -"""Diagnostics support for LG webOS Smart TV.""" +"""Diagnostics support for LG webOS TV.""" from __future__ import annotations diff --git a/homeassistant/components/webostv/helpers.py b/homeassistant/components/webostv/helpers.py index 3aea860798a..3c509a56d1e 100644 --- a/homeassistant/components/webostv/helpers.py +++ b/homeassistant/components/webostv/helpers.py @@ -1,16 +1,23 @@ -"""Helper functions for webOS Smart TV.""" +"""Helper functions for LG webOS TV.""" from __future__ import annotations +import logging + from aiowebostv import WebOsClient -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry -from . import WebOsTvConfigEntry, async_control_connect -from .const import DOMAIN, LIVE_TV_APP_ID, WEBOSTV_EXCEPTIONS +from .const import DOMAIN, LIVE_TV_APP_ID + +_LOGGER = logging.getLogger(__name__) + +type WebOsTvConfigEntry = ConfigEntry[WebOsClient] @callback @@ -32,7 +39,7 @@ def async_get_device_entry_by_device_id( def async_get_device_id_from_entity_id(hass: HomeAssistant, entity_id: str) -> str: """Get device ID from an entity ID. - Raises ValueError if entity or device ID is invalid. + Raises HomeAssistantError if entity or device ID is invalid. 
""" ent_reg = er.async_get(hass) entity_entry = ent_reg.async_get(entity_id) @@ -42,7 +49,11 @@ def async_get_device_id_from_entity_id(hass: HomeAssistant, entity_id: str) -> s or entity_entry.device_id is None or entity_entry.platform != DOMAIN ): - raise ValueError(f"Entity {entity_id} is not a valid {DOMAIN} entity.") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_entity_id", + translation_placeholders={"entity_id": entity_id}, + ) return entity_entry.device_id @@ -72,13 +83,8 @@ def async_get_client_by_device_entry( ) -async def async_get_sources(host: str, key: str) -> list[str]: +def get_sources(client: WebOsClient) -> list[str]: """Construct sources list.""" - try: - client = await async_control_connect(host, key) - except WEBOSTV_EXCEPTIONS: - return [] - sources = [] found_live_tv = False for app in client.apps.values(): @@ -96,3 +102,15 @@ async def async_get_sources(host: str, key: str) -> list[str]: # Preserve order when filtering duplicates return list(dict.fromkeys(sources)) + + +def update_client_key(hass: HomeAssistant, entry: WebOsTvConfigEntry) -> None: + """Check and update stored client key if key has changed.""" + client: WebOsClient = entry.runtime_data + host = entry.data[CONF_HOST] + key = entry.data[CONF_CLIENT_SECRET] + + if client.client_key != key: + _LOGGER.debug("Updating client key for host %s", host) + data = {CONF_HOST: host, CONF_CLIENT_SECRET: client.client_key} + hass.config_entries.async_update_entry(entry, data=data) diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index 627bb83572c..f1a8e163398 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -1,12 +1,12 @@ { "domain": "webostv", - "name": "LG webOS Smart TV", + "name": "LG webOS TV", "codeowners": ["@thecode"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/webostv", "iot_class": "local_push", "loggers": ["aiowebostv"], - "requirements": ["aiowebostv==0.5.0"], + "requirements": ["aiowebostv==0.6.0"], "ssdp": [ { "st": "urn:lge-com:service:webos-second-screen:1" diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index a03449a49b6..4b39841e29d 100644 --- a/homeassistant/components/webostv/media_player.py +++ b/homeassistant/components/webostv/media_player.py @@ -1,4 +1,4 @@ -"""Support for interface with an LG webOS Smart TV.""" +"""Support for interface with an LG webOS TV.""" from __future__ import annotations @@ -33,7 +33,6 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.trigger import PluggableAction from homeassistant.helpers.typing import VolDictType -from . 
import WebOsTvConfigEntry, update_client_key from .const import ( ATTR_BUTTON, ATTR_PAYLOAD, @@ -46,6 +45,7 @@ from .const import ( SERVICE_SELECT_SOUND_OUTPUT, WEBOSTV_EXCEPTIONS, ) +from .helpers import WebOsTvConfigEntry, update_client_key from .triggers.turn_on import async_get_turn_on_trigger _LOGGER = logging.getLogger(__name__) @@ -89,7 +89,7 @@ async def async_setup_entry( entry: WebOsTvConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Set up the LG webOS Smart TV platform.""" + """Set up the LG webOS TV platform.""" platform = entity_platform.async_get_current_platform() for service_name, schema, method in SERVICES: @@ -106,27 +106,33 @@ def cmd[_T: LgWebOSMediaPlayerEntity, **_P]( @wraps(func) async def cmd_wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: """Wrap all command methods.""" + if self.state is MediaPlayerState.OFF: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_off", + translation_placeholders={ + "name": str(self._entry.title), + "func": func.__name__, + }, + ) try: await func(self, *args, **kwargs) - except WEBOSTV_EXCEPTIONS as exc: - if self.state != MediaPlayerState.OFF: - raise HomeAssistantError( - f"Error calling {func.__name__} on entity {self.entity_id}," - f" state:{self.state}" - ) from exc - _LOGGER.warning( - "Error calling %s on entity %s, state:%s, error: %r", - func.__name__, - self.entity_id, - self.state, - exc, - ) + except WEBOSTV_EXCEPTIONS as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={ + "name": str(self._entry.title), + "func": func.__name__, + "error": str(error), + }, + ) from error return cmd_wrapper class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): - """Representation of a LG webOS Smart TV.""" + """Representation of a LG webOS TV.""" _attr_device_class = MediaPlayerDeviceClass.TV _attr_has_entity_name = True @@ -329,7 +335,7 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): except WebOsTvPairError: self._entry.async_start_reauth(self.hass) else: - update_client_key(self.hass, self._entry, self._client) + update_client_key(self.hass, self._entry) @property def supported_features(self) -> MediaPlayerEntityFeature: @@ -386,10 +392,14 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" if (source_dict := self._source_list.get(source)) is None: - _LOGGER.warning( - "Source %s not found for %s", source, self._friendly_name_internal() + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="source_not_found", + translation_placeholders={ + "source": source, + "name": str(self._friendly_name_internal()), + }, ) - return if source_dict.get("title"): await self._client.launch_app(source_dict["id"]) elif source_dict.get("label"): diff --git a/homeassistant/components/webostv/notify.py b/homeassistant/components/webostv/notify.py index fde0e6ad607..2393cb4cd07 100644 --- a/homeassistant/components/webostv/notify.py +++ b/homeassistant/components/webostv/notify.py @@ -1,20 +1,19 @@ -"""Support for LG WebOS TV notification service.""" +"""Support for LG webOS TV notification service.""" from __future__ import annotations -import logging from typing import Any -from aiowebostv import WebOsClient, WebOsTvPairError +from aiowebostv import WebOsClient from homeassistant.components.notify import ATTR_DATA, BaseNotificationService from 
homeassistant.const import ATTR_ICON from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import ATTR_CONFIG_ENTRY_ID, WEBOSTV_EXCEPTIONS - -_LOGGER = logging.getLogger(__name__) +from . import WebOsTvConfigEntry +from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, WEBOSTV_EXCEPTIONS PARALLEL_UPDATES = 0 @@ -34,28 +33,48 @@ async def async_get_service( ) assert config_entry is not None - return LgWebOSNotificationService(config_entry.runtime_data) + return LgWebOSNotificationService(config_entry) class LgWebOSNotificationService(BaseNotificationService): - """Implement the notification service for LG WebOS TV.""" + """Implement the notification service for LG webOS TV.""" - def __init__(self, client: WebOsClient) -> None: + def __init__(self, entry: WebOsTvConfigEntry) -> None: """Initialize the service.""" - self._client = client + self._entry = entry async def async_send_message(self, message: str = "", **kwargs: Any) -> None: """Send a message to the tv.""" - try: - if not self._client.is_connected(): - await self._client.connect() + client: WebOsClient = self._entry.runtime_data + data = kwargs[ATTR_DATA] + icon_path = data.get(ATTR_ICON) if data else None - data = kwargs[ATTR_DATA] - icon_path = data.get(ATTR_ICON) if data else None - await self._client.send_message(message, icon_path=icon_path) - except WebOsTvPairError: - _LOGGER.error("Pairing with TV failed") - except FileNotFoundError: - _LOGGER.error("Icon %s not found", icon_path) - except WEBOSTV_EXCEPTIONS: - _LOGGER.error("TV unreachable") + if not client.is_on: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="notify_device_off", + translation_placeholders={ + "name": str(self._entry.title), + "func": __name__, + }, + ) + try: + await client.send_message(message, icon_path=icon_path) + except FileNotFoundError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="notify_icon_not_found", + translation_placeholders={ + "name": str(self._entry.title), + "icon_path": str(icon_path), + }, + ) from error + except WEBOSTV_EXCEPTIONS as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="notify_communication_error", + translation_placeholders={ + "name": str(self._entry.title), + "error": str(error), + }, + ) from error diff --git a/homeassistant/components/webostv/quality_scale.yaml b/homeassistant/components/webostv/quality_scale.yaml index 1b3a3173ffa..70f845404cd 100644 --- a/homeassistant/components/webostv/quality_scale.yaml +++ b/homeassistant/components/webostv/quality_scale.yaml @@ -9,12 +9,10 @@ rules: config-flow-test-coverage: done config-flow: done dependency-transparency: done - docs-actions: - status: todo - comment: add description for parameters + docs-actions: done docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: done entity-unique-id: done has-entity-name: done @@ -24,10 +22,10 @@ rules: unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done - docs-configuration-parameters: todo - docs-installation-parameters: todo + docs-configuration-parameters: done + docs-installation-parameters: done entity-unavailable: todo integration-owner: done log-when-unavailable: todo @@ -40,13 +38,13 @@ rules: diagnostics: done discovery-update-info: 
done discovery: done - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-troubleshooting: todo - docs-use-cases: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: The integration connects to a single device. @@ -60,7 +58,7 @@ rules: entity-translations: status: exempt comment: There are no entities to translate. - exception-translations: todo + exception-translations: done icon-translations: status: exempt comment: The only entity can use the device class. @@ -74,7 +72,5 @@ rules: # Platinum async-dependency: done - inject-websession: - status: todo - comment: need to check if it is needed for websockets or migrate to aiohttp + inject-websession: done strict-typing: done diff --git a/homeassistant/components/webostv/strings.json b/homeassistant/components/webostv/strings.json index b0786bd06de..f6d033af632 100644 --- a/homeassistant/components/webostv/strings.json +++ b/homeassistant/components/webostv/strings.json @@ -12,7 +12,7 @@ } }, "pairing": { - "title": "webOS TV Pairing", + "title": "LG webOS TV Pairing", "description": "Select **Submit** and accept the pairing request on your TV.\n\n![Image](/static/images/config_webos.png)" }, "reauth_confirm": { @@ -43,7 +43,7 @@ "options": { "step": { "init": { - "title": "Options for webOS Smart TV", + "title": "Options for LG webOS TV", "description": "Select enabled sources", "data": { "sources": "Sources list" @@ -54,7 +54,8 @@ } }, "error": { - "cannot_retrieve": "Unable to retrieve the list of sources. Make sure device is switched on" + "cannot_connect": "[%key:component::webostv::config::error::cannot_connect%]", + "error_pairing": "[%key:component::webostv::config::error::error_pairing%]" } }, "device_automation": { @@ -109,5 +110,34 @@ } } } + }, + "exceptions": { + "device_off": { + "message": "Error calling {func} for device {name}: Device is off and cannot be controlled." + }, + "communication_error": { + "message": "Communication error while calling {func} for device {name}: {error}" + }, + "notify_device_off": { + "message": "Error sending notification to device {name}: Device is off and cannot be controlled." + }, + "notify_icon_not_found": { + "message": "Icon {icon_path} not found when sending notification for device {name}" + }, + "notify_communication_error": { + "message": "Communication error while sending notification to device {name}: {error}" + }, + "unhandled_trigger_type": { + "message": "Unhandled trigger type: {trigger_type}" + }, + "unknown_trigger_platform": { + "message": "Unknown trigger platform: {platform}" + }, + "invalid_entity_id": { + "message": "Entity {entity_id} is not a valid webostv entity." + }, + "source_not_found": { + "message": "Source {source} not found in the sources list for {name}." 
+ } } } diff --git a/homeassistant/components/webostv/trigger.py b/homeassistant/components/webostv/trigger.py index 3290aa4a448..f121daafb91 100644 --- a/homeassistant/components/webostv/trigger.py +++ b/homeassistant/components/webostv/trigger.py @@ -1,4 +1,4 @@ -"""webOS Smart TV trigger dispatcher.""" +"""LG webOS TV trigger dispatcher.""" from __future__ import annotations @@ -6,6 +6,7 @@ from typing import cast from homeassistant.const import CONF_PLATFORM from homeassistant.core import CALLBACK_TYPE, HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.trigger import ( TriggerActionType, TriggerInfo, @@ -13,6 +14,7 @@ from homeassistant.helpers.trigger import ( ) from homeassistant.helpers.typing import ConfigType +from .const import DOMAIN from .triggers import turn_on TRIGGERS = { @@ -24,8 +26,10 @@ def _get_trigger_platform(config: ConfigType) -> TriggerProtocol: """Return trigger platform.""" platform_split = config[CONF_PLATFORM].split(".", maxsplit=1) if len(platform_split) < 2 or platform_split[1] not in TRIGGERS: - raise ValueError( - f"Unknown webOS Smart TV trigger platform {config[CONF_PLATFORM]}" + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unknown_trigger_platform", + translation_placeholders={"platform": config[CONF_PLATFORM]}, ) return cast(TriggerProtocol, TRIGGERS[platform_split[1]]) diff --git a/homeassistant/components/webostv/triggers/__init__.py b/homeassistant/components/webostv/triggers/__init__.py index d8c5a28ef3f..89bdf5f90ee 100644 --- a/homeassistant/components/webostv/triggers/__init__.py +++ b/homeassistant/components/webostv/triggers/__init__.py @@ -1 +1 @@ -"""webOS Smart TV triggers.""" +"""LG webOS TV triggers.""" diff --git a/homeassistant/components/webostv/triggers/turn_on.py b/homeassistant/components/webostv/triggers/turn_on.py index f2ecb8aa98d..648da690715 100644 --- a/homeassistant/components/webostv/triggers/turn_on.py +++ b/homeassistant/components/webostv/triggers/turn_on.py @@ -1,4 +1,4 @@ -"""webOS Smart TV device turn on trigger.""" +"""LG webOS TV device turn on trigger.""" from __future__ import annotations diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index ad9b9a6fe71..4c78e077d21 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "requirements": ["aiowithings==3.1.4"] + "requirements": ["aiowithings==3.1.5"] } diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index eba970dc2db..6efb66449ab 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,7 +16,7 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.42.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.43.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/youless/__init__.py b/homeassistant/components/youless/__init__.py index d475034cc9d..03a27b5a378 100644 --- a/homeassistant/components/youless/__init__.py +++ b/homeassistant/components/youless/__init__.py @@ -1,6 +1,5 @@ """The youless integration.""" -from datetime import timedelta import logging from urllib.error 
import URLError @@ -10,9 +9,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN +from .coordinator import YouLessCoordinator PLATFORMS = [Platform.SENSOR] @@ -28,24 +27,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except URLError as exception: raise ConfigEntryNotReady from exception - async def async_update_data() -> YoulessAPI: - """Fetch data from the API.""" - await hass.async_add_executor_job(api.update) - return api - - coordinator = DataUpdateCoordinator( - hass, - _LOGGER, - config_entry=entry, - name="youless_gateway", - update_method=async_update_data, - update_interval=timedelta(seconds=10), - ) - - await coordinator.async_config_entry_first_refresh() + youless_coordinator = YouLessCoordinator(hass, api) + await youless_coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + hass.data[DOMAIN][entry.entry_id] = youless_coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/youless/coordinator.py b/homeassistant/components/youless/coordinator.py new file mode 100644 index 00000000000..0be5e463689 --- /dev/null +++ b/homeassistant/components/youless/coordinator.py @@ -0,0 +1,25 @@ +"""The coordinator for the Youless integration.""" + +from datetime import timedelta +import logging + +from youless_api import YoulessAPI + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class YouLessCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching YouLess data.""" + + def __init__(self, hass: HomeAssistant, device: YoulessAPI) -> None: + """Initialize global YouLess data provider.""" + super().__init__( + hass, _LOGGER, name="youless_gateway", update_interval=timedelta(seconds=10) + ) + self.device = device + + async def _async_update_data(self) -> None: + await self.hass.async_add_executor_job(self.device.update) diff --git a/homeassistant/components/youless/entity.py b/homeassistant/components/youless/entity.py new file mode 100644 index 00000000000..9931768c267 --- /dev/null +++ b/homeassistant/components/youless/entity.py @@ -0,0 +1,25 @@ +"""The entity for the Youless integration.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import YouLessCoordinator + + +class YouLessEntity(CoordinatorEntity[YouLessCoordinator]): + """Base entity for YouLess.""" + + def __init__( + self, coordinator: YouLessCoordinator, device_group: str, device_name: str + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.device = coordinator.device + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device_group)}, + manufacturer="YouLess", + model=self.device.model, + name=device_name, + sw_version=self.device.firmware_version, + ) diff --git a/homeassistant/components/youless/sensor.py b/homeassistant/components/youless/sensor.py index ed0fc703cc4..413f1ad6958 100644 --- a/homeassistant/components/youless/sensor.py +++ 
b/homeassistant/components/youless/sensor.py @@ -2,12 +2,15 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass + from youless_api import YoulessAPI -from youless_api.youless_sensor import YoulessSensor from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, + SensorEntityDescription, SensorStateClass, ) from homeassistant.config_entries import ConfigEntry @@ -20,346 +23,316 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) from . import DOMAIN +from .coordinator import YouLessCoordinator +from .entity import YouLessEntity + + +@dataclass(frozen=True, kw_only=True) +class YouLessSensorEntityDescription(SensorEntityDescription): + """Describes a YouLess sensor entity.""" + + device_group: str + device_group_name: str + value_func: Callable[[YoulessAPI], float | None] + + +SENSOR_TYPES: tuple[YouLessSensorEntityDescription, ...] = ( + YouLessSensorEntityDescription( + key="water", + device_group="water", + device_group_name="Water meter", + name="Water usage", + icon="mdi:water", + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + value_func=( + lambda device: device.water_meter.value if device.water_meter else None + ), + ), + YouLessSensorEntityDescription( + key="gas", + device_group="gas", + device_group_name="Gas meter", + name="Gas usage", + icon="mdi:fire", + device_class=SensorDeviceClass.GAS, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + value_func=lambda device: device.gas_meter.value if device.gas_meter else None, + ), + YouLessSensorEntityDescription( + key="usage", + device_group="power", + device_group_name="Power usage", + name="Power Usage", + icon="mdi:meter-electric", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=( + lambda device: device.current_power_usage.value + if device.current_power_usage + else None + ), + ), + YouLessSensorEntityDescription( + key="power_low", + device_group="power", + device_group_name="Power usage", + name="Energy low", + icon="mdi:transmission-tower-export", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.power_meter.low.value if device.power_meter else None + ), + ), + YouLessSensorEntityDescription( + key="power_high", + device_group="power", + device_group_name="Power usage", + name="Energy high", + icon="mdi:transmission-tower-export", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.power_meter.high.value if device.power_meter else None + ), + ), + YouLessSensorEntityDescription( + key="power_total", + device_group="power", + device_group_name="Power usage", + name="Energy total", + icon="mdi:transmission-tower-export", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL, + 
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.power_meter.total.value + if device.power_meter + else None + ), + ), + YouLessSensorEntityDescription( + key="phase_1_power", + device_group="power", + device_group_name="Power usage", + name="Phase 1 power", + icon=None, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.phase1.power.value if device.phase1 else None, + ), + YouLessSensorEntityDescription( + key="phase_1_voltage", + device_group="power", + device_group_name="Power usage", + name="Phase 1 voltage", + icon=None, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=( + lambda device: device.phase1.voltage.value if device.phase1 else None + ), + ), + YouLessSensorEntityDescription( + key="phase_1_current", + device_group="power", + device_group_name="Power usage", + name="Phase 1 current", + icon=None, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=( + lambda device: device.phase1.current.value if device.phase1 else None + ), + ), + YouLessSensorEntityDescription( + key="phase_2_power", + device_group="power", + device_group_name="Power usage", + name="Phase 2 power", + icon=None, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.phase2.power.value if device.phase2 else None, + ), + YouLessSensorEntityDescription( + key="phase_2_voltage", + device_group="power", + device_group_name="Power usage", + name="Phase 2 voltage", + icon=None, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=( + lambda device: device.phase2.voltage.value if device.phase2 else None + ), + ), + YouLessSensorEntityDescription( + key="phase_2_current", + device_group="power", + device_group_name="Power usage", + name="Phase 2 current", + icon=None, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, +
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=( + lambda device: device.phase2.current.value if device.phase2 else None + ), + ), + YouLessSensorEntityDescription( + key="phase_3_power", + device_group="power", + device_group_name="Power usage", + name="Phase 3 power", + icon=None, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.phase3.power.value if device.phase3 else None, + ), + YouLessSensorEntityDescription( + key="phase_3_voltage", + device_group="power", + device_group_name="Power usage", + name="Phase 3 voltage", + icon=None, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=( + lambda device: device.phase3.voltage.value if device.phase3 else None + ), + ), + YouLessSensorEntityDescription( + key="phase_3_current", + device_group="power", + device_group_name="Power usage", + name="Phase 3 current", + icon=None, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=( + lambda device: device.phase3.current.value if device.phase3 else None + ), + ), + YouLessSensorEntityDescription( + key="delivery_low", + device_group="delivery", + device_group_name="Energy delivery", + name="Energy delivery low", + icon="mdi:transmission-tower-import", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.delivery_meter.low.value + if device.delivery_meter + else None + ), + ), + YouLessSensorEntityDescription( + key="delivery_high", + device_group="delivery", + device_group_name="Energy delivery", + name="Energy delivery high", + icon="mdi:transmission-tower-import", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.delivery_meter.high.value + if device.delivery_meter + else None + ), + ), + YouLessSensorEntityDescription( + key="extra_total", + device_group="extra", + device_group_name="Extra meter", + name="Extra total", + icon="mdi:meter-electric", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_func=( + lambda device: device.extra_meter.total.value + if device.extra_meter + else None + ), + ), + YouLessSensorEntityDescription( + key="extra_usage", + device_group="extra", + device_group_name="Extra meter", + name="Extra usage", + icon="mdi:lightning-bolt", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=( + lambda device: device.extra_meter.usage.value + if device.extra_meter + else None + ), + ), +) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Initialize the integration.""" - coordinator: DataUpdateCoordinator[YoulessAPI] = hass.data[DOMAIN][entry.entry_id] + coordinator: YouLessCoordinator = hass.data[DOMAIN][entry.entry_id] device = entry.data[CONF_DEVICE] if (device := entry.data[CONF_DEVICE]) is None: device = entry.entry_id async_add_entities( [ - WaterSensor(coordinator, device), - GasSensor(coordinator, device), - EnergyMeterSensor( - coordinator, device, "low", SensorStateClass.TOTAL_INCREASING - ), - EnergyMeterSensor( - coordinator, device, "high", SensorStateClass.TOTAL_INCREASING - ), - EnergyMeterSensor(coordinator, device, "total", SensorStateClass.TOTAL), - CurrentPowerSensor(coordinator, device), - DeliveryMeterSensor(coordinator, device, "low"), - DeliveryMeterSensor(coordinator, device, "high"), - ExtraMeterSensor(coordinator, device, "total"), - ExtraMeterPowerSensor(coordinator, device, "usage"), - PhasePowerSensor(coordinator, device, 1), - PhaseVoltageSensor(coordinator, device, 1), - PhaseCurrentSensor(coordinator, device, 1), - PhasePowerSensor(coordinator, device, 2), - PhaseVoltageSensor(coordinator, device, 2), - PhaseCurrentSensor(coordinator, device, 2), - PhasePowerSensor(coordinator, device, 3), - PhaseVoltageSensor(coordinator, device, 3), - PhaseCurrentSensor(coordinator, device, 3), + YouLessSensor(coordinator, description, device) + for description in SENSOR_TYPES ] ) -class YoulessBaseSensor( - CoordinatorEntity[DataUpdateCoordinator[YoulessAPI]], SensorEntity -): - """The base sensor for Youless.""" +class
YouLessSensor(YouLessEntity, SensorEntity): + """Representation of a Sensor.""" + + entity_description: YouLessSensorEntityDescription def __init__( self, - coordinator: DataUpdateCoordinator[YoulessAPI], + coordinator: YouLessCoordinator, + description: YouLessSensorEntityDescription, device: str, - device_group: str, - friendly_name: str, - sensor_id: str, ) -> None: - """Create the sensor.""" - super().__init__(coordinator) - self._attr_unique_id = f"{DOMAIN}_{device}_{sensor_id}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{device}_{device_group}")}, - manufacturer="YouLess", - model=self.coordinator.data.model, - name=friendly_name, + """Initialize the sensor.""" + super().__init__( + coordinator, + f"{device}_{description.device_group}", + description.device_group_name, ) - - @property - def get_sensor(self) -> YoulessSensor | None: - """Property to get the underlying sensor object.""" - return None + self._attr_unique_id = f"{DOMAIN}_{device}_{description.key}" + self.entity_description = description @property def native_value(self) -> StateType: - """Determine the state value, only if a sensor is initialized.""" - if self.get_sensor is None: - return None - - return self.get_sensor.value - - @property - def available(self) -> bool: - """Return a flag to indicate the sensor not being available.""" - return super().available and self.get_sensor is not None - - -class WaterSensor(YoulessBaseSensor): - """The Youless Water sensor.""" - - _attr_native_unit_of_measurement = UnitOfVolume.CUBIC_METERS - _attr_device_class = SensorDeviceClass.WATER - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str - ) -> None: - """Instantiate a Water sensor.""" - super().__init__(coordinator, device, "water", "Water meter", "water") - self._attr_name = "Water usage" - self._attr_icon = "mdi:water" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - return self.coordinator.data.water_meter - - -class GasSensor(YoulessBaseSensor): - """The Youless gas sensor.""" - - _attr_native_unit_of_measurement = UnitOfVolume.CUBIC_METERS - _attr_device_class = SensorDeviceClass.GAS - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str - ) -> None: - """Instantiate a gas sensor.""" - super().__init__(coordinator, device, "gas", "Gas meter", "gas") - self._attr_name = "Gas usage" - self._attr_icon = "mdi:fire" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - return self.coordinator.data.gas_meter - - -class CurrentPowerSensor(YoulessBaseSensor): - """The current power usage sensor.""" - - _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_device_class = SensorDeviceClass.POWER - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str - ) -> None: - """Instantiate the usage meter.""" - super().__init__(coordinator, device, "power", "Power usage", "usage") - self._device = device - self._attr_name = "Power Usage" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - return self.coordinator.data.current_power_usage - - -class DeliveryMeterSensor(YoulessBaseSensor): - """The Youless delivery meter value sensor.""" - - _attr_native_unit_of_measurement = 
UnitOfEnergy.KILO_WATT_HOUR - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, dev_type: str - ) -> None: - """Instantiate a delivery meter sensor.""" - super().__init__( - coordinator, device, "delivery", "Energy delivery", f"delivery_{dev_type}" - ) - self._type = dev_type - self._attr_name = f"Energy delivery {dev_type}" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - if self.coordinator.data.delivery_meter is None: - return None - - return getattr(self.coordinator.data.delivery_meter, f"_{self._type}", None) - - -class EnergyMeterSensor(YoulessBaseSensor): - """The Youless low meter value sensor.""" - - _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__( - self, - coordinator: DataUpdateCoordinator[YoulessAPI], - device: str, - dev_type: str, - state_class: SensorStateClass, - ) -> None: - """Instantiate a energy meter sensor.""" - super().__init__( - coordinator, device, "power", "Energy usage", f"power_{dev_type}" - ) - self._device = device - self._type = dev_type - self._attr_name = f"Energy {dev_type}" - self._attr_state_class = state_class - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - if self.coordinator.data.power_meter is None: - return None - - return getattr(self.coordinator.data.power_meter, f"_{self._type}", None) - - -class PhasePowerSensor(YoulessBaseSensor): - """The current power usage of a single phase.""" - - _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_device_class = SensorDeviceClass.POWER - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, phase: int - ) -> None: - """Initialize the power phase sensor.""" - super().__init__( - coordinator, device, "power", "Energy usage", f"phase_{phase}_power" - ) - self._attr_name = f"Phase {phase} power" - self._phase = phase - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor value from the coordinator.""" - phase_sensor = getattr(self.coordinator.data, f"phase{self._phase}", None) - if phase_sensor is None: - return None - - return phase_sensor.power - - -class PhaseVoltageSensor(YoulessBaseSensor): - """The current voltage of a single phase.""" - - _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT - _attr_device_class = SensorDeviceClass.VOLTAGE - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, phase: int - ) -> None: - """Initialize the voltage phase sensor.""" - super().__init__( - coordinator, device, "power", "Energy usage", f"phase_{phase}_voltage" - ) - self._attr_name = f"Phase {phase} voltage" - self._phase = phase - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor value from the coordinator for phase voltage.""" - phase_sensor = getattr(self.coordinator.data, f"phase{self._phase}", None) - if phase_sensor is None: - return None - - return phase_sensor.voltage - - -class PhaseCurrentSensor(YoulessBaseSensor): - """The current current of a single phase.""" - - _attr_native_unit_of_measurement = UnitOfElectricCurrent.AMPERE - _attr_device_class = 
SensorDeviceClass.CURRENT - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, phase: int - ) -> None: - """Initialize the current phase sensor.""" - super().__init__( - coordinator, device, "power", "Energy usage", f"phase_{phase}_current" - ) - self._attr_name = f"Phase {phase} current" - self._phase = phase - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor value from the coordinator for phase current.""" - phase_sensor = getattr(self.coordinator.data, f"phase{self._phase}", None) - if phase_sensor is None: - return None - - return phase_sensor.current - - -class ExtraMeterSensor(YoulessBaseSensor): - """The Youless extra meter value sensor (s0).""" - - _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL_INCREASING - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, dev_type: str - ) -> None: - """Instantiate an extra meter sensor.""" - super().__init__( - coordinator, device, "extra", "Extra meter", f"extra_{dev_type}" - ) - self._type = dev_type - self._attr_name = f"Extra {dev_type}" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - if self.coordinator.data.extra_meter is None: - return None - - return getattr(self.coordinator.data.extra_meter, f"_{self._type}", None) - - -class ExtraMeterPowerSensor(YoulessBaseSensor): - """The Youless extra meter power value sensor (s0).""" - - _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_device_class = SensorDeviceClass.POWER - _attr_state_class = SensorStateClass.MEASUREMENT - - def __init__( - self, coordinator: DataUpdateCoordinator[YoulessAPI], device: str, dev_type: str - ) -> None: - """Instantiate an extra meter power sensor.""" - super().__init__( - coordinator, device, "extra", "Extra meter", f"extra_{dev_type}" - ) - self._type = dev_type - self._attr_name = f"Extra {dev_type}" - - @property - def get_sensor(self) -> YoulessSensor | None: - """Get the sensor for providing the value.""" - if self.coordinator.data.extra_meter is None: - return None - - return getattr(self.coordinator.data.extra_meter, f"_{self._type}", None) + """Return the state of the sensor.""" + return self.entity_description.value_func(self.device) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index b301c1ad191..6fe2b5b1923 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.140.1"] + "requirements": ["zeroconf==0.141.0"] } diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index fc63b7e9119..e2d7720189d 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -1,28 +1,28 @@ { "config": { "abort": { - "addon_get_discovery_info_failed": "Failed to get Z-Wave JS add-on discovery info.", - "addon_info_failed": "Failed to get Z-Wave JS add-on info.", - "addon_install_failed": "Failed to install the Z-Wave JS add-on.", - "addon_set_config_failed": "Failed to set Z-Wave JS configuration.", - "addon_start_failed": "Failed to start the Z-Wave JS add-on.", + 
"addon_get_discovery_info_failed": "Failed to get Z-Wave add-on discovery info.", + "addon_info_failed": "Failed to get Z-Wave add-on info.", + "addon_install_failed": "Failed to install the Z-Wave add-on.", + "addon_set_config_failed": "Failed to set Z-Wave configuration.", + "addon_start_failed": "Failed to start the Z-Wave add-on.", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "discovery_requires_supervisor": "Discovery requires the supervisor.", "not_zwave_device": "Discovered device is not a Z-Wave device.", - "not_zwave_js_addon": "Discovered add-on is not the official Z-Wave JS add-on." + "not_zwave_js_addon": "Discovered add-on is not the official Z-Wave add-on." }, "error": { - "addon_start_failed": "Failed to start the Z-Wave JS add-on. Check the configuration.", + "addon_start_failed": "Failed to start the Z-Wave add-on. Check the configuration.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_ws_url": "Invalid websocket URL", "unknown": "[%key:common::config_flow::error::unknown%]" }, "flow_title": "{name}", "progress": { - "install_addon": "Please wait while the Z-Wave JS add-on installation finishes. This can take several minutes.", - "start_addon": "Please wait while the Z-Wave JS add-on start completes. This may take some seconds." + "install_addon": "Please wait while the Z-Wave add-on installation finishes. This can take several minutes.", + "start_addon": "Please wait while the Z-Wave add-on start completes. This may take some seconds." }, "step": { "configure_addon": { @@ -34,13 +34,13 @@ "usb_path": "[%key:common::config_flow::data::usb_path%]" }, "description": "The add-on will generate security keys if those fields are left empty.", - "title": "Enter the Z-Wave JS add-on configuration" + "title": "Enter the Z-Wave add-on configuration" }, "hassio_confirm": { - "title": "Set up Z-Wave JS integration with the Z-Wave JS add-on" + "title": "Set up Z-Wave integration with the Z-Wave add-on" }, "install_addon": { - "title": "The Z-Wave JS add-on installation has started" + "title": "The Z-Wave add-on installation has started" }, "manual": { "data": { @@ -49,20 +49,20 @@ }, "on_supervisor": { "data": { - "use_addon": "Use the Z-Wave JS Supervisor add-on" + "use_addon": "Use the Z-Wave Supervisor add-on" }, - "description": "Do you want to use the Z-Wave JS Supervisor add-on?", + "description": "Do you want to use the Z-Wave Supervisor add-on?", "title": "Select connection method" }, "start_addon": { - "title": "The Z-Wave JS add-on is starting." + "title": "The Z-Wave add-on is starting." }, "usb_confirm": { - "description": "Do you want to set up {name} with the Z-Wave JS add-on?" + "description": "Do you want to set up {name} with the Z-Wave add-on?" 
}, "zeroconf_confirm": { - "description": "Do you want to add the Z-Wave JS Server with home ID {home_id} found at {url} to Home Assistant?", - "title": "Discovered Z-Wave JS Server" + "description": "Do you want to add the Z-Wave Server with home ID {home_id} found at {url} to Home Assistant?", + "title": "Discovered Z-Wave Server" } } }, @@ -89,7 +89,7 @@ "event.value_notification.scene_activation": "Scene Activation on {subtype}", "state.node_status": "Node status changed", "zwave_js.value_updated.config_parameter": "Value change on config parameter {subtype}", - "zwave_js.value_updated.value": "Value change on a Z-Wave JS Value" + "zwave_js.value_updated.value": "Value change on a Z-Wave Value" }, "extra_fields": { "code_slot": "Code slot", @@ -191,7 +191,7 @@ }, "step": { "init": { - "description": "The device configuration file for {device_name} has changed.\n\nZ-Wave JS discovers a lot of device metadata by interviewing the device. However, some of the information has to be loaded from a configuration file. Some of this information is only evaluated once, during the device interview.\n\nWhen a device config file is updated, this information may be stale and and the device must be re-interviewed to pick up the changes.\n\n This is not a required operation and device functionality will be impacted during the re-interview process, but you may see improvements for your device once it is complete.\n\nIf you decide to proceed with the re-interview, it will take place in the background.", + "description": "The device configuration file for {device_name} has changed.\n\nZ-Wave discovers a lot of device metadata by interviewing the device. However, some of the information has to be loaded from a configuration file. Some of this information is only evaluated once, during the device interview.\n\nWhen a device config file is updated, this information may be stale and the device must be re-interviewed to pick up the changes.\n\n This is not a required operation and device functionality will be impacted during the re-interview process, but you may see improvements for your device once it is complete.\n\nIf you decide to proceed with the re-interview, it will take place in the background.", "menu_options": { "confirm": "Re-interview device", "ignore": "Ignore device config update" @@ -203,8 +203,8 @@ "title": "Device configuration file changed: {device_name}" }, "invalid_server_version": { - "description": "The version of Z-Wave JS Server you are currently running is too old for this version of Home Assistant. Please update the Z-Wave JS Server to the latest version to fix this issue.", - "title": "Newer version of Z-Wave JS Server needed" + "description": "The version of Z-Wave Server you are currently running is too old for this version of Home Assistant. Please update the Z-Wave Server to the latest version to fix this issue.", + "title": "Newer version of Z-Wave Server needed" } }, "options": { @@ -306,7 +306,7 @@ "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "area_id": { - "description": "The area(s) to target for this action. If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", + "description": "The area(s) to target for this action.
If an area is specified, all Z-Wave devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -326,18 +326,18 @@ "name": "Entity ID(s)" }, "method_name": { - "description": "The name of the API method to call. Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.", + "description": "The name of the API method to call. Refer to the Z-Wave Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.", "name": "Method name" }, "parameters": { - "description": "A list of parameters to pass to the API method. Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for parameters.", + "description": "A list of parameters to pass to the API method. Refer to the Z-Wave Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for parameters.", "name": "Parameters" } }, "name": "Invoke a Command Class API on a node (advanced)" }, "multicast_set_value": { - "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", + "description": "Changes any value that Z-Wave recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "area_id": { "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", @@ -383,7 +383,7 @@ "name": "Set a value on multiple devices via multicast (advanced)" }, "ping": { - "description": "Forces Z-Wave JS to try to reach a node. This can be used to update the status of the node in Z-Wave JS when you think it doesn't accurately reflect reality, e.g. reviving a failed/dead node or marking the node as asleep.", + "description": "Forces Z-Wave to try to reach a node. This can be used to update the status of the node in Z-Wave when you think it doesn't accurately reflect reality, e.g. reviving a failed/dead node or marking the node as asleep.", "fields": { "area_id": { "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", @@ -474,7 +474,7 @@ "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" }, "bitmask": { - "description": "Target a specific bitmask (see the documentation for more information). Cannot be combined with value_size or value_format.", + "description": "Target a specific bitmask (see the documentation for more information). Cannot be combined with 'Value size' or 'Value format'.", "name": "Bitmask" }, "device_id": { @@ -498,11 +498,11 @@ "name": "Value" }, "value_format": { - "description": "Format of the value, 0 for signed integer, 1 for unsigned integer, 2 for enumerated, 3 for bitfield. Used in combination with value_size when a config parameter is not defined in your device's configuration file. Cannot be combined with bitmask.", + "description": "Format of the value, 0 for signed integer, 1 for unsigned integer, 2 for enumerated, 3 for bitfield. Used in combination with 'Value size' when a config parameter is not defined in your device's configuration file. 
Cannot be combined with 'Bitmask'.", "name": "Value format" }, "value_size": { - "description": "Size of the value, either 1, 2, or 4. Used in combination with value_format when a config parameter is not defined in your device's configuration file. Cannot be combined with bitmask.", + "description": "Size of the value, either 1, 2, or 4. Used in combination with 'Value format' when a config parameter is not defined in your device's configuration file. Cannot be combined with 'Bitmask'.", "name": "Value size" } }, @@ -553,10 +553,10 @@ "name": "Set lock user code" }, "set_value": { - "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", + "description": "Changes any value that Z-Wave recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "area_id": { - "description": "The area(s) to target for this action. If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", + "description": "The area(s) to target for this action. If an area is specified, all Z-Wave devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -576,7 +576,7 @@ "name": "Entity ID(s)" }, "options": { - "description": "Set value options map. Refer to the Z-Wave JS documentation for more information on what options can be set.", + "description": "Set value options map. Refer to the Z-Wave documentation for more information on what options can be set.", "name": "Options" }, "property": { diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index b4e6660275c..8a5880dcde9 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -434,10 +434,6 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "led_ble", "local_name": "AP-*", }, - { - "domain": "led_ble", - "local_name": "MELK-*", - }, { "domain": "led_ble", "local_name": "LD-0003", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 5fef087a868..7d14ab0f444 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -253,6 +253,15 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "hunter*", "macaddress": "002674*", }, + { + "domain": "incomfort", + "hostname": "rfgateway", + "macaddress": "0004A3*", + }, + { + "domain": "incomfort", + "registered_devices": True, + }, { "domain": "insteon", "macaddress": "000EF3*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 2ee871964c9..9a7167f5367 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3340,7 +3340,7 @@ "integration_type": "hub", "config_flow": true, "iot_class": "local_push", - "name": "LG webOS Smart TV" + "name": "LG webOS TV" } } }, diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index abad11bb36e..ea376923f9d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -5,15 +5,20 @@ from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import Callable from dataclasses import dataclass +from datetime import timedelta from decimal import Decimal from enum import Enum from functools import cache, partial -from typing import Any +from typing import Any, cast 
import slugify as unicode_slug import voluptuous as vol from voluptuous_openapi import UNSUPPORTED, convert +from homeassistant.components.calendar import ( + DOMAIN as CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, +) from homeassistant.components.climate import INTENT_GET_TEMPERATURE from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER from homeassistant.components.homeassistant import async_should_expose @@ -28,7 +33,7 @@ from homeassistant.const import ( ) from homeassistant.core import Context, Event, HomeAssistant, callback, split_entity_id from homeassistant.exceptions import HomeAssistantError -from homeassistant.util import yaml as yaml_util +from homeassistant.util import dt as dt_util, yaml as yaml_util from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import JsonObjectType @@ -415,6 +420,8 @@ class AssistAPI(API): IntentTool(self.cached_slugify(intent_handler.intent_type), intent_handler) for intent_handler in intent_handlers ] + if exposed_domains and CALENDAR_DOMAIN in exposed_domains: + tools.append(CalendarGetEventsTool()) if llm_context.assistant is not None: for state in self.hass.states.async_all(SCRIPT_DOMAIN): @@ -755,3 +762,66 @@ class ScriptTool(Tool): ) return {"success": True, "result": result} + + +class CalendarGetEventsTool(Tool): + """LLM Tool allowing querying a calendar.""" + + name = "calendar_get_events" + description = ( + "Get events from a calendar. " + "When asked when something happens, search the whole week. " + "Results are RFC 5545 which means 'end' is exclusive." + ) + parameters = vol.Schema( + { + vol.Required("calendar"): cv.string, + vol.Required("range"): vol.In(["today", "week"]), + } + ) + + async def async_call( + self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext + ) -> JsonObjectType: + """Query a calendar.""" + data = self.parameters(tool_input.tool_args) + result = intent.async_match_targets( + hass, + intent.MatchTargetsConstraints( + name=data["calendar"], + domains=[CALENDAR_DOMAIN], + assistant=llm_context.assistant, + ), + ) + if not result.is_match: + return {"success": False, "error": "Calendar not found"} + + entity_id = result.states[0].entity_id + if data["range"] == "today": + start = dt_util.now() + end = dt_util.start_of_local_day() + timedelta(days=1) + elif data["range"] == "week": + start = dt_util.now() + end = dt_util.start_of_local_day() + timedelta(days=7) + + service_data = { + "entity_id": entity_id, + "start_date_time": start.isoformat(), + "end_date_time": end.isoformat(), + } + + service_result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + service_data, + context=llm_context.context, + blocking=True, + return_response=True, + ) + + events = [ + event if "T" in event["start"] else {**event, "all_day": True} + for event in cast(dict, service_result)[entity_id]["events"] + ] + + return {"success": True, "result": events} diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index fac03300bdc..7866250d658 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -1735,7 +1735,7 @@ def label_entities(hass: HomeAssistant, label_id_or_name: str) -> Iterable[str]: return [entry.entity_id for entry in entries] -def closest(hass, *args): +def closest(hass: HomeAssistant, *args: Any) -> State | None: """Find closest entity. 
Closest to home: @@ -1775,21 +1775,24 @@ def closest(hass, *args): ) return None - latitude = point_state.attributes.get(ATTR_LATITUDE) - longitude = point_state.attributes.get(ATTR_LONGITUDE) + latitude = point_state.attributes[ATTR_LATITUDE] + longitude = point_state.attributes[ATTR_LONGITUDE] entities = args[1] else: - latitude = convert(args[0], float) - longitude = convert(args[1], float) + latitude_arg = convert(args[0], float) + longitude_arg = convert(args[1], float) - if latitude is None or longitude is None: + if latitude_arg is None or longitude_arg is None: _LOGGER.warning( "Closest:Received invalid coordinates: %s, %s", args[0], args[1] ) return None + latitude = latitude_arg + longitude = longitude_arg + entities = args[2] states = expand(hass, entities) @@ -1798,20 +1801,20 @@ def closest(hass, *args): return loc_helper.closest(latitude, longitude, states) -def closest_filter(hass, *args): +def closest_filter(hass: HomeAssistant, *args: Any) -> State | None: """Call closest as a filter. Need to reorder arguments.""" new_args = list(args[1:]) new_args.append(args[0]) return closest(hass, *new_args) -def distance(hass, *args): +def distance(hass: HomeAssistant, *args: Any) -> float | None: """Calculate distance. Will calculate distance from home to a point or between points. Points can be passed in using state objects or lat/lng coordinates. """ - locations = [] + locations: list[tuple[float, float]] = [] to_process = list(args) @@ -1831,10 +1834,10 @@ def distance(hass, *args): return None value_2 = to_process.pop(0) - latitude = convert(value, float) - longitude = convert(value_2, float) + latitude_to_process = convert(value, float) + longitude_to_process = convert(value_2, float) - if latitude is None or longitude is None: + if latitude_to_process is None or longitude_to_process is None: _LOGGER.warning( "Distance:Unable to process latitude and longitude: %s, %s", value, @@ -1842,6 +1845,9 @@ def distance(hass, *args): ) return None + latitude = latitude_to_process + longitude = longitude_to_process + else: if not loc_helper.has_location(point_state): _LOGGER.warning( @@ -1849,8 +1855,8 @@ def distance(hass, *args): ) return None - latitude = point_state.attributes.get(ATTR_LATITUDE) - longitude = point_state.attributes.get(ATTR_LONGITUDE) + latitude = point_state.attributes[ATTR_LATITUDE] + longitude = point_state.attributes[ATTR_LONGITUDE] locations.append((latitude, longitude)) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index a804cb90cf3..cb29214390b 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -11,16 +11,16 @@ aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 -async-upnp-client==0.42.0 +async-upnp-client==0.43.0 atomicwrites-homeassistant==1.4.1 attrs==24.2.0 audioop-lts==0.2.1;python_version>='3.13' av==13.1.0 awesomeversion==24.6.0 bcrypt==4.2.0 -bleak-retry-connector==3.7.0 +bleak-retry-connector==3.8.0 bleak==0.22.3 -bluetooth-adapters==0.21.0 +bluetooth-adapters==0.21.1 bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.22.0 cached-ipaddress==0.8.0 @@ -32,13 +32,13 @@ dbus-fast==2.30.2 fnv-hash-fast==1.2.2 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 -habluetooth==3.9.2 +habluetooth==3.12.0 hass-nabucasa==0.88.1 hassil==2.1.0 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20250109.0 +home-assistant-frontend==20250109.2 home-assistant-intents==2025.1.1 -httpx==0.27.2 +httpx==0.28.1 ifaddr==0.2.0 Jinja2==3.1.5 lru-dict==1.3.0 @@ -73,7 
+73,7 @@ voluptuous-serialize==2.6.0 voluptuous==0.15.2 webrtc-models==0.3.0 yarl==1.18.3 -zeroconf==0.140.1 +zeroconf==0.141.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -110,7 +110,7 @@ uuid==1000000000.0.0 # requirements so we can directly link HA versions to these library versions. anyio==4.8.0 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 # Ensure we have a hyperframe version that works in Python 3.10 # 5.2.0 fixed a collections abc deprecation @@ -128,7 +128,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.4 +pydantic==2.10.6 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index b769d385a4f..c16269a2a8b 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -58,7 +58,7 @@ def benchmark[_CallableT: Callable](func: _CallableT) -> _CallableT: @benchmark -async def fire_events(hass): +async def fire_events(hass: core.HomeAssistant) -> float: """Fire a million events.""" count = 0 event_name = "benchmark_event" @@ -85,7 +85,7 @@ async def fire_events(hass): @benchmark -async def fire_events_with_filter(hass): +async def fire_events_with_filter(hass: core.HomeAssistant) -> float: """Fire a million events with a filter that rejects them.""" count = 0 event_name = "benchmark_event" @@ -117,7 +117,7 @@ async def fire_events_with_filter(hass): @benchmark -async def state_changed_helper(hass): +async def state_changed_helper(hass: core.HomeAssistant) -> float: """Run a million events through state changed helper with 1000 entities.""" count = 0 entity_id = "light.kitchen" @@ -141,7 +141,7 @@ async def state_changed_helper(hass): } for _ in range(10**6): - hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) + hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) # type: ignore[misc] start = timer() @@ -151,7 +151,7 @@ async def state_changed_helper(hass): @benchmark -async def state_changed_event_helper(hass): +async def state_changed_event_helper(hass: core.HomeAssistant) -> float: """Run a million events through state changed event helper with 1000 entities.""" count = 0 entity_id = "light.kitchen" @@ -174,7 +174,7 @@ async def state_changed_event_helper(hass): } for _ in range(events_to_fire): - hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) + hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) # type: ignore[misc] start = timer() @@ -186,7 +186,7 @@ async def state_changed_event_helper(hass): @benchmark -async def state_changed_event_filter_helper(hass): +async def state_changed_event_filter_helper(hass: core.HomeAssistant) -> float: """Run a million events through state changed event helper. With 1000 entities that all get filtered. 
@@ -212,7 +212,7 @@ async def state_changed_event_filter_helper(hass): } for _ in range(events_to_fire): - hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) + hass.bus.async_fire(EVENT_STATE_CHANGED, event_data) # type: ignore[misc] start = timer() @@ -224,7 +224,7 @@ async def state_changed_event_filter_helper(hass): @benchmark -async def filtering_entity_id(hass): +async def filtering_entity_id(hass: core.HomeAssistant) -> float: """Run a 100k state changes through entity filter.""" config = { "include": { @@ -289,7 +289,7 @@ async def filtering_entity_id(hass): @benchmark -async def valid_entity_id(hass): +async def valid_entity_id(hass: core.HomeAssistant) -> float: """Run valid entity ID a million times.""" start = timer() for _ in range(10**6): @@ -298,7 +298,7 @@ async def valid_entity_id(hass): @benchmark -async def json_serialize_states(hass): +async def json_serialize_states(hass: core.HomeAssistant) -> float: """Serialize million states with websocket default encoder.""" states = [ core.State("light.kitchen", "on", {"friendly_name": "Kitchen Lights"}) diff --git a/mypy.ini b/mypy.ini index e4056203875..7f7b66e238f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2126,6 +2126,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.homee.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.homekit.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2376,6 +2386,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.incomfort.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.input_button.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2826,6 +2846,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.lovelace.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.luftdaten.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index d06d078ae8b..f76e0b43c10 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -106,7 +106,8 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "aiohttp_client": "ClientSessionGenerator", "aiohttp_server": "Callable[[], TestServer]", "area_registry": "AreaRegistry", - "async_test_recorder": "RecorderInstanceGenerator", + "async_test_recorder": "RecorderInstanceContextManager", + "async_setup_recorder_instance": "RecorderInstanceGenerator", "caplog": "pytest.LogCaptureFixture", "capsys": "pytest.CaptureFixture[str]", "current_request_with_host": "None", diff --git a/pyproject.toml b/pyproject.toml index c4a1c45671a..56f2533840a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ dependencies = [ "hass-nabucasa==0.88.1", # When bumping 
httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all - "httpx==0.27.2", + "httpx==0.28.1", "home-assistant-bluetooth==1.13.0", "ifaddr==0.2.0", "Jinja2==3.1.5", @@ -82,7 +82,7 @@ dependencies = [ "voluptuous-openapi==0.0.6", "yarl==1.18.3", "webrtc-models==0.3.0", - "zeroconf==0.140.1" + "zeroconf==0.141.0" ] [project.urls] diff --git a/requirements.txt b/requirements.txt index 91a5d131b3b..f1eb8dac825 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,7 +22,7 @@ ciso8601==2.3.2 cronsim==2.6 fnv-hash-fast==1.2.2 hass-nabucasa==0.88.1 -httpx==0.27.2 +httpx==0.28.1 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 Jinja2==3.1.5 @@ -51,4 +51,4 @@ voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.6 yarl==1.18.3 webrtc-models==0.3.0 -zeroconf==0.140.1 +zeroconf==0.141.0 diff --git a/requirements_all.txt b/requirements_all.txt index 2a05e882e17..f9a58779c8e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -7,7 +7,7 @@ AEMET-OpenData==0.6.4 # homeassistant.components.honeywell -AIOSomecomfort==0.0.28 +AIOSomecomfort==0.0.30 # homeassistant.components.adax Adax-local==0.1.5 @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.39.1 +PyViCare==2.41.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -128,7 +128,7 @@ TravisPy==0.3.5 TwitterAPI==2.7.12 # homeassistant.components.onvif -WSDiscovery==2.0.0 +WSDiscovery==2.1.2 # homeassistant.components.accuweather accuweather==4.0.0 @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.13 +aioacaia==0.1.14 # homeassistant.components.airq aioairq==0.4.3 @@ -201,7 +201,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2025.1.0 +aioautomower==2025.1.1 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -258,7 +258,7 @@ aiogithubapi==24.6.0 aioguardian==2022.07.0 # homeassistant.components.harmony -aioharmony==0.2.10 +aioharmony==0.4.1 # homeassistant.components.hassio aiohasupervisor==0.2.2b5 @@ -416,10 +416,10 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.5.0 +aiowebostv==0.6.0 # homeassistant.components.withings -aiowithings==3.1.4 +aiowithings==3.1.5 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -467,7 +467,7 @@ anova-wifi==0.17.0 anthemav==1.4.1 # homeassistant.components.anthropic -anthropic==0.31.2 +anthropic==0.44.0 # homeassistant.components.mcp_server anyio==4.8.0 @@ -505,7 +505,7 @@ asmog==0.0.6 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.42.0 +async-upnp-client==0.43.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -591,10 +591,10 @@ bizkaibus==0.1.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.0.0 +bleak-esphome==2.1.1 # homeassistant.components.bluetooth -bleak-retry-connector==3.7.0 +bleak-retry-connector==3.8.0 # homeassistant.components.bluetooth bleak==0.22.3 @@ -619,7 +619,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.21.0 +bluetooth-adapters==0.21.1 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -1097,7 +1097,7 @@ ha-philipsjs==3.2.2 habiticalib==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.9.2 +habluetooth==3.12.0 # homeassistant.components.cloud hass-nabucasa==0.88.1 @@ -1140,7 +1140,7 @@ 
hole==0.8.0 holidays==0.65 # homeassistant.components.frontend -home-assistant-frontend==20250109.0 +home-assistant-frontend==20250109.2 # homeassistant.components.conversation home-assistant-intents==2025.1.1 @@ -1199,7 +1199,7 @@ ifaddr==0.2.0 iglo==1.2.7 # homeassistant.components.igloohome -igloohome-api==0.0.6 +igloohome-api==0.1.0 # homeassistant.components.ihc ihcsdk==2.8.5 @@ -1208,7 +1208,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.9 # homeassistant.components.incomfort -incomfort-client==0.6.4 +incomfort-client==0.6.7 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1296,7 +1296,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.1.1 +led-ble==1.1.4 # homeassistant.components.lektrico lektricowifi==0.0.43 @@ -1446,7 +1446,7 @@ mutesync==0.0.1 mypermobil==0.1.8 # homeassistant.components.myuplink -myuplink==0.6.0 +myuplink==0.7.0 # homeassistant.components.nad nad-receiver==0.3.0 @@ -1482,7 +1482,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.niko_home_control -nhc==0.3.4 +nhc==0.3.9 # homeassistant.components.nibe_heatpump nibe==2.14.0 @@ -1540,10 +1540,10 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ohme -ohme==1.2.3 +ohme==1.2.5 # homeassistant.components.ollama -ollama==0.4.5 +ollama==0.4.7 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1552,7 +1552,7 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.2.3 +onvif-zeep-async==3.2.5 # homeassistant.components.opengarage open-garage==0.2.0 @@ -1561,7 +1561,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.35.7 +openai==1.59.9 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1618,7 +1618,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.3 +peblar==0.4.0 # homeassistant.components.peco peco==0.0.30 @@ -1665,7 +1665,7 @@ pmsensor==0.4 poolsense==0.0.8 # homeassistant.components.powerfox -powerfox==1.2.0 +powerfox==1.2.1 # homeassistant.components.reddit praw==7.5.0 @@ -1763,7 +1763,7 @@ pyEmby==1.10 pyHik==0.3.2 # homeassistant.components.homee -pyHomee==1.2.0 +pyHomee==1.2.3 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1923,7 +1923,7 @@ pyeiscp==0.0.7 pyemoncms==0.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.23.0 +pyenphase==1.23.1 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -2303,7 +2303,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.5 +pysmlight==0.1.6 # homeassistant.components.snmp pysnmp==6.2.6 @@ -2399,7 +2399,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.9.1 # homeassistant.components.linkplay -python-linkplay==0.1.1 +python-linkplay==0.1.3 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2506,7 +2506,7 @@ pyvera==0.3.15 pyversasense==0.0.6 # homeassistant.components.vesync -pyvesync==2.1.15 +pyvesync==2.1.16 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2596,7 +2596,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.11.6 +reolink-aio==0.11.8 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2934,7 +2934,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.4.1 +uiprotect==7.5.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -3118,7 +3118,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.140.1 +zeroconf==0.141.0 # 
homeassistant.components.zeversolar zeversolar==0.3.2 diff --git a/requirements_test.txt b/requirements_test.txt index 029073f19a2..68945852298 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -14,7 +14,7 @@ license-expression==30.4.0 mock-open==1.4.0 mypy-dev==1.15.0a2 pre-commit==4.0.0 -pydantic==2.10.4 +pydantic==2.10.6 pylint==3.3.3 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 @@ -31,7 +31,7 @@ pytest-picked==0.5.0 pytest-xdist==3.6.1 pytest==8.3.4 requests-mock==1.12.1 -respx==0.21.1 +respx==0.22.0 syrupy==4.8.0 tqdm==4.66.5 types-aiofiles==24.1.0.20241221 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0e7b4a92a93..127d08c22d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -7,7 +7,7 @@ AEMET-OpenData==0.6.4 # homeassistant.components.honeywell -AIOSomecomfort==0.0.28 +AIOSomecomfort==0.0.30 # homeassistant.components.adax Adax-local==0.1.5 @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.39.1 +PyViCare==2.41.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -116,7 +116,7 @@ SQLAlchemy==2.0.36 Tami4EdgeAPI==3.0 # homeassistant.components.onvif -WSDiscovery==2.0.0 +WSDiscovery==2.1.2 # homeassistant.components.accuweather accuweather==4.0.0 @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.13 +aioacaia==0.1.14 # homeassistant.components.airq aioairq==0.4.3 @@ -189,7 +189,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2025.1.0 +aioautomower==2025.1.1 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -243,7 +243,7 @@ aiogithubapi==24.6.0 aioguardian==2022.07.0 # homeassistant.components.harmony -aioharmony==0.2.10 +aioharmony==0.4.1 # homeassistant.components.hassio aiohasupervisor==0.2.2b5 @@ -398,10 +398,10 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.5.0 +aiowebostv==0.6.0 # homeassistant.components.withings -aiowithings==3.1.4 +aiowithings==3.1.5 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -440,7 +440,7 @@ anova-wifi==0.17.0 anthemav==1.4.1 # homeassistant.components.anthropic -anthropic==0.31.2 +anthropic==0.44.0 # homeassistant.components.mcp_server anyio==4.8.0 @@ -469,7 +469,7 @@ arcam-fmj==1.5.2 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.42.0 +async-upnp-client==0.43.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -522,10 +522,10 @@ bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.0.0 +bleak-esphome==2.1.1 # homeassistant.components.bluetooth -bleak-retry-connector==3.7.0 +bleak-retry-connector==3.8.0 # homeassistant.components.bluetooth bleak==0.22.3 @@ -543,7 +543,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.21.0 +bluetooth-adapters==0.21.1 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -938,7 +938,7 @@ ha-philipsjs==3.2.2 habiticalib==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.9.2 +habluetooth==3.12.0 # homeassistant.components.cloud hass-nabucasa==0.88.1 @@ -969,7 +969,7 @@ hole==0.8.0 holidays==0.65 # homeassistant.components.frontend -home-assistant-frontend==20250109.0 +home-assistant-frontend==20250109.2 # homeassistant.components.conversation 
home-assistant-intents==2025.1.1 @@ -1016,13 +1016,13 @@ idasen-ha==2.6.3 ifaddr==0.2.0 # homeassistant.components.igloohome -igloohome-api==0.0.6 +igloohome-api==0.1.0 # homeassistant.components.imgw_pib imgw_pib==1.0.9 # homeassistant.components.incomfort -incomfort-client==0.6.4 +incomfort-client==0.6.7 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1095,7 +1095,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.1.1 +led-ble==1.1.4 # homeassistant.components.lektrico lektricowifi==0.0.43 @@ -1218,7 +1218,7 @@ mutesync==0.0.1 mypermobil==0.1.8 # homeassistant.components.myuplink -myuplink==0.6.0 +myuplink==0.7.0 # homeassistant.components.keenetic_ndms2 ndms2-client==0.1.2 @@ -1245,7 +1245,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.niko_home_control -nhc==0.3.4 +nhc==0.3.9 # homeassistant.components.nibe_heatpump nibe==2.14.0 @@ -1288,10 +1288,10 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ohme -ohme==1.2.3 +ohme==1.2.5 # homeassistant.components.ollama -ollama==0.4.5 +ollama==0.4.7 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1300,7 +1300,7 @@ omnilogic==0.4.5 ondilo==0.5.0 # homeassistant.components.onvif -onvif-zeep-async==3.2.3 +onvif-zeep-async==3.2.5 # homeassistant.components.opengarage open-garage==0.2.0 @@ -1309,7 +1309,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.35.7 +openai==1.59.9 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1345,7 +1345,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.3.3 +peblar==0.4.0 # homeassistant.components.peco peco==0.0.30 @@ -1375,7 +1375,7 @@ plumlightpad==0.0.11 poolsense==0.0.8 # homeassistant.components.powerfox -powerfox==1.2.0 +powerfox==1.2.1 # homeassistant.components.reddit praw==7.5.0 @@ -1452,7 +1452,7 @@ pyDuotecno==2024.10.1 pyElectra==1.2.4 # homeassistant.components.homee -pyHomee==1.2.0 +pyHomee==1.2.3 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1567,7 +1567,7 @@ pyeiscp==0.0.7 pyemoncms==0.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.23.0 +pyenphase==1.23.1 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1875,7 +1875,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.5 +pysmlight==0.1.6 # homeassistant.components.snmp pysnmp==6.2.6 @@ -1938,7 +1938,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.9.1 # homeassistant.components.linkplay -python-linkplay==0.1.1 +python-linkplay==0.1.3 # homeassistant.components.matter python-matter-server==7.0.0 @@ -2024,7 +2024,7 @@ pyuptimerobot==22.2.0 pyvera==0.3.15 # homeassistant.components.vesync -pyvesync==2.1.15 +pyvesync==2.1.16 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2099,7 +2099,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.11.6 +reolink-aio==0.11.8 # homeassistant.components.rflink rflink==0.0.66 @@ -2359,7 +2359,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.4.1 +uiprotect==7.5.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2507,7 +2507,7 @@ yt-dlp[default]==2025.01.15 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.140.1 +zeroconf==0.141.0 # homeassistant.components.zeversolar zeversolar==0.3.2 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index e2b60e777a2..ef57b9140ce 100755 --- 
a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -141,7 +141,7 @@ uuid==1000000000.0.0 # requirements so we can directly link HA versions to these library versions. anyio==4.8.0 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 # Ensure we have a hyperframe version that works in Python 3.10 # 5.2.0 fixed a collections abc deprecation @@ -159,7 +159,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.4 +pydantic==2.10.6 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 7ca7110c49b..3732101913c 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -595,7 +595,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "linux_battery", "lirc", "litejet", - "litterrobot", "livisi", "llamalab_automate", "local_calendar", @@ -979,7 +978,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "swisscom", "switch_as_x", "switchbee", - "switchbot", "switchbot_cloud", "switcher_kis", "switchmate", diff --git a/tests/components/airgradient/test_button.py b/tests/components/airgradient/test_button.py index 83de2c2f048..2440669b6e8 100644 --- a/tests/components/airgradient/test_button.py +++ b/tests/components/airgradient/test_button.py @@ -3,14 +3,16 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch -from airgradient import Config +from airgradient import AirGradientConnectionError, AirGradientError, Config from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy import SnapshotAssertion from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -97,3 +99,37 @@ async def test_cloud_creates_no_button( await hass.async_block_till_done() assert len(hass.states.async_all()) == 0 + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + ( + AirGradientConnectionError("Something happened"), + "An error occurred while communicating with the Airgradient device: Something happened", + ), + ( + AirGradientError("Something else happened"), + "An unknown error occurred while communicating with the Airgradient device: Something else happened", + ), + ], +) +async def test_exception_handling( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error_message: str, +) -> None: + """Test exception handling.""" + await setup_integration(hass, mock_config_entry) + mock_airgradient_client.request_co2_calibration.side_effect = exception + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.airgradient_calibrate_co2_sensor", + }, + blocking=True, + ) diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 01d48e852ca..4c035b09aa7 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -296,3 +296,99 @@ async def test_user_flow_works_discovery( # Verify the discovery flow was aborted assert not hass.config_entries.flow.async_progress(DOMAIN) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_new_airgradient_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data == { + CONF_HOST: "10.0.0.131", + } + + +async def test_reconfigure_flow_errors( + hass: HomeAssistant, + mock_new_airgradient_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + mock_new_airgradient_client.get_current_measures.side_effect = ( + AirGradientConnectionError() + ) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.132"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + mock_new_airgradient_client.get_current_measures.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.132"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data == { + CONF_HOST: "10.0.0.132", + } + + +async def test_reconfigure_flow_unique_id_mismatch( + hass: HomeAssistant, + mock_new_airgradient_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: 
MockConfigEntry, +) -> None: + """Test reconfigure flow aborts with unique id mismatch.""" + mock_config_entry.add_to_hass(hass) + + mock_new_airgradient_client.get_current_measures.return_value.serial_number = ( + "84fce612f5b9" + ) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.132"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert mock_config_entry.data == { + CONF_HOST: "10.0.0.131", + } diff --git a/tests/components/airgradient/test_number.py b/tests/components/airgradient/test_number.py index 7aabda8f81c..2cbd72d033a 100644 --- a/tests/components/airgradient/test_number.py +++ b/tests/components/airgradient/test_number.py @@ -3,8 +3,9 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch -from airgradient import Config +from airgradient import AirGradientConnectionError, AirGradientError, Config from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy import SnapshotAssertion from homeassistant.components.airgradient.const import DOMAIN @@ -15,6 +16,7 @@ from homeassistant.components.number import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . import setup_integration @@ -99,3 +101,37 @@ async def test_cloud_creates_no_number( await hass.async_block_till_done() assert len(hass.states.async_all()) == 0 + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + ( + AirGradientConnectionError("Something happened"), + "An error occurred while communicating with the Airgradient device: Something happened", + ), + ( + AirGradientError("Something else happened"), + "An unknown error occurred while communicating with the Airgradient device: Something else happened", + ), + ], +) +async def test_exception_handling( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error_message: str, +) -> None: + """Test exception handling.""" + await setup_integration(hass, mock_config_entry) + + mock_airgradient_client.set_display_brightness.side_effect = exception + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50}, + target={ATTR_ENTITY_ID: "number.airgradient_display_brightness"}, + blocking=True, + ) diff --git a/tests/components/airgradient/test_select.py b/tests/components/airgradient/test_select.py index de4a7beaaa7..b8ae2cefa4e 100644 --- a/tests/components/airgradient/test_select.py +++ b/tests/components/airgradient/test_select.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch -from airgradient import Config +from airgradient import AirGradientConnectionError, AirGradientError, Config from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -15,6 +15,7 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er 
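The airgradient test_exception_handling cases added above all assert the same contract: AirGradientConnectionError and AirGradientError raised by the client must surface as HomeAssistantError with the quoted messages. A minimal sketch of the kind of entity-side wrapper those tests exercise; the helper name and the bare client call are illustrative assumptions, not the shipped airgradient code:

    from typing import Any

    from airgradient import AirGradientConnectionError, AirGradientError

    from homeassistant.exceptions import HomeAssistantError


    async def _call_airgradient(client: Any, value: int) -> None:
        """Translate library errors into HomeAssistantError (hypothetical helper)."""
        try:
            # set_display_brightness is one of the client methods mocked in the tests above.
            await client.set_display_brightness(value)
        except AirGradientConnectionError as err:
            raise HomeAssistantError(
                f"An error occurred while communicating with the Airgradient device: {err}"
            ) from err
        except AirGradientError as err:
            raise HomeAssistantError(
                f"An unknown error occurred while communicating with the Airgradient device: {err}"
            ) from err

Order matters here: AirGradientConnectionError is caught before the broader AirGradientError, so the two error messages the tests match stay distinct.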
from . import setup_integration @@ -94,3 +95,39 @@ async def test_cloud_creates_no_number( await hass.async_block_till_done() assert len(hass.states.async_all()) == 1 + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + ( + AirGradientConnectionError("Something happened"), + "An error occurred while communicating with the Airgradient device: Something happened", + ), + ( + AirGradientError("Something else happened"), + "An unknown error occurred while communicating with the Airgradient device: Something else happened", + ), + ], +) +async def test_exception_handling( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error_message: str, +) -> None: + """Test exception handling.""" + await setup_integration(hass, mock_config_entry) + + mock_airgradient_client.set_configuration_control.side_effect = exception + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.airgradient_configuration_source", + ATTR_OPTION: "local", + }, + blocking=True, + ) diff --git a/tests/components/airgradient/test_switch.py b/tests/components/airgradient/test_switch.py index a0cbdd17d75..475f38f554c 100644 --- a/tests/components/airgradient/test_switch.py +++ b/tests/components/airgradient/test_switch.py @@ -3,8 +3,9 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch -from airgradient import Config +from airgradient import AirGradientConnectionError, AirGradientError, Config from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy import SnapshotAssertion from homeassistant.components.airgradient.const import DOMAIN @@ -16,6 +17,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -99,3 +101,36 @@ async def test_cloud_creates_no_switch( await hass.async_block_till_done() assert len(hass.states.async_all()) == 0 + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + ( + AirGradientConnectionError("Something happened"), + "An error occurred while communicating with the Airgradient device: Something happened", + ), + ( + AirGradientError("Something else happened"), + "An unknown error occurred while communicating with the Airgradient device: Something else happened", + ), + ], +) +async def test_exception_handling( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error_message: str, +) -> None: + """Test exception handling.""" + await setup_integration(hass, mock_config_entry) + + mock_airgradient_client.enable_sharing_data.side_effect = exception + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + target={ATTR_ENTITY_ID: "switch.airgradient_post_data_to_airgradient"}, + blocking=True, + ) diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index bb44a0abeb1..0c3c0ba7c7a 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -275,6 +275,7 @@ 'config_entry': dict({ 'data': dict({ 'host': '192.168.1.100', + 'id': 0, 'port': 3000, }), 'disabled_by': None, @@ -282,7 +283,7 @@ }), 'domain': 'airzone', 'entry_id': '6e7a0798c1734ba81d26ced0e690eaec', - 'minor_version': 1, + 'minor_version': 2, 'options': dict({ }), 'pref_disable_new_entities': False, diff --git a/tests/components/airzone/test_config_flow.py b/tests/components/airzone/test_config_flow.py index 9bc0a8cedbd..65897c6da7e 100644 --- a/tests/components/airzone/test_config_flow.py +++ b/tests/components/airzone/test_config_flow.py @@ -28,6 +28,7 @@ from .util import ( HVAC_MOCK, HVAC_VERSION_MOCK, HVAC_WEBSERVER_MOCK, + USER_INPUT, ) from tests.common import MockConfigEntry @@ -81,7 +82,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result["errors"] == {} result = await hass.config_entries.flow.async_configure( - result["flow_id"], CONFIG + result["flow_id"], USER_INPUT ) await hass.async_block_till_done() @@ -94,7 +95,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result["title"] == f"Airzone {CONFIG[CONF_HOST]}:{CONFIG[CONF_PORT]}" assert result["data"][CONF_HOST] == CONFIG[CONF_HOST] assert result["data"][CONF_PORT] == CONFIG[CONF_PORT] - assert CONF_ID not in result["data"] + assert result["data"][CONF_ID] == CONFIG[CONF_ID] assert len(mock_setup_entry.mock_calls) == 1 @@ -129,7 +130,7 @@ async def test_form_invalid_system_id(hass: HomeAssistant) -> None: ), ): result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + DOMAIN, context={"source": SOURCE_USER}, data=USER_INPUT ) assert result["type"] is FlowResultType.FORM @@ -154,7 +155,7 @@ async def test_form_invalid_system_id(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert ( result["title"] - == f"Airzone {CONFIG_ID1[CONF_HOST]}:{CONFIG_ID1[CONF_PORT]}" + == f"Airzone {CONFIG_ID1[CONF_HOST]}:{CONFIG_ID1[CONF_PORT]} #{CONFIG_ID1[CONF_ID]}" ) assert result["data"][CONF_HOST] == CONFIG_ID1[CONF_HOST] assert result["data"][CONF_PORT] == CONFIG_ID1[CONF_PORT] @@ -167,6 +168,7 @@ async def 
test_form_duplicated_id(hass: HomeAssistant) -> None:
     """Test setting up duplicated entry."""

     config_entry = MockConfigEntry(
+        minor_version=2,
         data=CONFIG,
         domain=DOMAIN,
         unique_id="airzone_unique_id",
@@ -174,7 +176,7 @@ async def test_form_duplicated_id(hass: HomeAssistant) -> None:
     config_entry.add_to_hass(hass)

     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
+        DOMAIN, context={"source": SOURCE_USER}, data=USER_INPUT
     )

     assert result["type"] is FlowResultType.ABORT
@@ -189,7 +191,7 @@ async def test_connection_error(hass: HomeAssistant) -> None:
         side_effect=AirzoneError,
     ):
         result = await hass.config_entries.flow.async_init(
-            DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
+            DOMAIN, context={"source": SOURCE_USER}, data=USER_INPUT
         )

     assert result["errors"] == {"base": "cannot_connect"}
diff --git a/tests/components/airzone/test_coordinator.py b/tests/components/airzone/test_coordinator.py
index 583758a6bee..fcdcad6a32a 100644
--- a/tests/components/airzone/test_coordinator.py
+++ b/tests/components/airzone/test_coordinator.py
@@ -25,6 +25,7 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None:
     """Test ClientConnectorError on coordinator update."""

     config_entry = MockConfigEntry(
+        minor_version=2,
         data=CONFIG,
         domain=DOMAIN,
         unique_id="airzone_unique_id",
@@ -74,6 +75,7 @@ async def test_coordinator_new_devices(
     """Test new devices on coordinator update."""

     config_entry = MockConfigEntry(
+        minor_version=2,
         data=CONFIG,
         domain=DOMAIN,
         unique_id="airzone_unique_id",
diff --git a/tests/components/airzone/test_init.py b/tests/components/airzone/test_init.py
index 293fc75acb5..a2783cb7c2f 100644
--- a/tests/components/airzone/test_init.py
+++ b/tests/components/airzone/test_init.py
@@ -2,14 +2,16 @@

 from unittest.mock import patch

+from aioairzone.const import DEFAULT_SYSTEM_ID
 from aioairzone.exceptions import HotWaterNotAvailable, InvalidMethod, SystemOutOfRange

 from homeassistant.components.airzone.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import CONF_ID
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er

-from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK, HVAC_WEBSERVER_MOCK
+from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK, HVAC_WEBSERVER_MOCK, USER_INPUT

 from tests.common import MockConfigEntry
@@ -19,7 +21,11 @@ async def test_unique_id_migrate(
 ) -> None:
     """Test unique id migration."""

-    config_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG)
+    config_entry = MockConfigEntry(
+        minor_version=2,
+        domain=DOMAIN,
+        data=CONFIG,
+    )
     config_entry.add_to_hass(hass)

     with (
@@ -89,6 +95,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None:
     """Test unload."""

     config_entry = MockConfigEntry(
+        minor_version=2,
         data=CONFIG,
         domain=DOMAIN,
         unique_id="airzone_unique_id",
@@ -112,3 +119,42 @@ async def test_unload_entry(hass: HomeAssistant) -> None:
     await hass.config_entries.async_unload(config_entry.entry_id)
     await hass.async_block_till_done()
     assert config_entry.state is ConfigEntryState.NOT_LOADED
+
+
+async def test_migrate_entry_v2(hass: HomeAssistant) -> None:
+    """Test entry migration to v2."""
+
+    config_entry = MockConfigEntry(
+        minor_version=1,
+        data=USER_INPUT,
+        domain=DOMAIN,
+    )
+    config_entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.airzone.AirzoneLocalApi.get_dhw",
+            side_effect=HotWaterNotAvailable,
+        ),
+        patch(
+            "homeassistant.components.airzone.AirzoneLocalApi.get_hvac",
+            return_value=HVAC_MOCK,
+        ),
+        patch(
+            "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems",
+            side_effect=SystemOutOfRange,
+        ),
+        patch(
+            "homeassistant.components.airzone.AirzoneLocalApi.get_version",
+            return_value=HVAC_VERSION_MOCK,
+        ),
+        patch(
+            "homeassistant.components.airzone.AirzoneLocalApi.get_webserver",
+            side_effect=InvalidMethod,
+        ),
+    ):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert config_entry.minor_version == 2
+    assert config_entry.data.get(CONF_ID) == DEFAULT_SYSTEM_ID
diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py
index b51dfb890e4..50d1964924d 100644
--- a/tests/components/airzone/util.py
+++ b/tests/components/airzone/util.py
@@ -55,6 +55,7 @@ from aioairzone.const import (
     API_WS_AZ,
     API_WS_TYPE,
     API_ZONE_ID,
+    DEFAULT_SYSTEM_ID,
 )

 from homeassistant.components.airzone.const import DOMAIN
@@ -63,13 +64,18 @@ from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry

-CONFIG = {
+USER_INPUT = {
     CONF_HOST: "192.168.1.100",
     CONF_PORT: 3000,
 }

+CONFIG = {
+    **USER_INPUT,
+    CONF_ID: DEFAULT_SYSTEM_ID,
+}
+
 CONFIG_ID1 = {
-    **CONFIG,
+    **USER_INPUT,
     CONF_ID: 1,
 }
@@ -359,6 +365,7 @@ async def async_init_integration(
     """Set up the Airzone integration in Home Assistant."""

     config_entry = MockConfigEntry(
+        minor_version=2,
         data=CONFIG,
         entry_id="6e7a0798c1734ba81d26ced0e690eaec",
         domain=DOMAIN,
diff --git a/tests/components/apcupsd/test_config_flow.py b/tests/components/apcupsd/test_config_flow.py
index 88594260579..0b8386dbb5a 100644
--- a/tests/components/apcupsd/test_config_flow.py
+++ b/tests/components/apcupsd/test_config_flow.py
@@ -125,6 +125,8 @@ async def test_flow_works(hass: HomeAssistant) -> None:
         ({"UPSNAME": "Friendly Name"}, "Friendly Name"),
         ({"MODEL": "MODEL X"}, "MODEL X"),
         ({"SERIALNO": "ZZZZ"}, "ZZZZ"),
+        # Some models report "Blank" as the serial number, which we should treat as not reported.
+        ({"SERIALNO": "Blank"}, "APC UPS"),
         ({}, "APC UPS"),
     ],
 )
diff --git a/tests/components/apcupsd/test_init.py b/tests/components/apcupsd/test_init.py
index 723ec164eae..6bb94ca2948 100644
--- a/tests/components/apcupsd/test_init.py
+++ b/tests/components/apcupsd/test_init.py
@@ -31,6 +31,8 @@ from tests.common import MockConfigEntry, async_fire_time_changed
         # Does not contain either "SERIALNO" field.
         # We should _not_ create devices for the entities and their IDs will not have prefixes.
         MOCK_MINIMAL_STATUS,
+        # Some models report "Blank" as SERIALNO, but we should treat it as not reported.
+        MOCK_MINIMAL_STATUS | {"SERIALNO": "Blank"},
     ],
 )
 async def test_async_setup_entry(hass: HomeAssistant, status: OrderedDict) -> None:
@@ -41,7 +43,7 @@ async def test_async_setup_entry(hass: HomeAssistant, status: OrderedDict) -> No
     await async_init_integration(hass, status=status)

     prefix = ""
-    if "SERIALNO" in status:
+    if "SERIALNO" in status and status["SERIALNO"] != "Blank":
         prefix = slugify(status.get("UPSNAME", "APC UPS")) + "_"

     # Verify successful setup by querying the status sensor.
@@ -56,6 +58,8 @@ async def test_async_setup_entry(hass: HomeAssistant, status: OrderedDict) -> No
     [
         # We should not create device entries if SERIALNO is not reported.
         MOCK_MINIMAL_STATUS,
+        # Some models report "Blank" as SERIALNO, but we should treat it as not reported.
+ MOCK_MINIMAL_STATUS | {"SERIALNO": "Blank"}, # We should set the device name to be the friendly UPSNAME field if available. MOCK_MINIMAL_STATUS | {"SERIALNO": "XXXX", "UPSNAME": "MyUPS"}, # Otherwise, we should fall back to default device name --- "APC UPS". @@ -71,7 +75,7 @@ async def test_device_entry( await async_init_integration(hass, status=status) # Verify device info is properly set up. - if "SERIALNO" not in status: + if "SERIALNO" not in status or status["SERIALNO"] == "Blank": assert len(device_registry.devices) == 0 return diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index f1208877690..f91473e3b70 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -84,6 +84,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -114,6 +115,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -144,6 +146,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -174,6 +177,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -204,6 +208,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 2c88dc50577..43b4c1260dd 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -245,6 +245,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -284,6 +285,7 @@ 'last_attempted_automatic_backup': '2024-10-26T04:45:00+01:00', 'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00', 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': 3, 'days': 7, @@ -326,6 +328,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': 3, 'days': None, @@ -361,6 +364,7 @@ 'last_attempted_automatic_backup': '2024-10-27T04:45:00+01:00', 'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00', 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': 7, @@ -396,6 +400,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-18T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -432,6 +437,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 
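The apcupsd hunks a short way above teach the tests that a SERIALNO of "Blank" must behave exactly like a missing serial number: no device entry and no entity-id prefix. A small sketch of that normalization rule as those tests describe it; the helper names are hypothetical, not the integration's actual code:

    from homeassistant.util import slugify


    def normalize_serial_no(status: dict[str, str]) -> str | None:
        """Return the UPS serial number, or None when it is absent or reported as "Blank"."""
        serial = status.get("SERIALNO")
        if not serial or serial == "Blank":
            return None
        return serial


    def entity_id_prefix(status: dict[str, str]) -> str:
        """Reproduce the prefix rule the updated test_async_setup_entry asserts."""
        if normalize_serial_no(status) is None:
            return ""
        return slugify(status.get("UPSNAME", "APC UPS")) + "_"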
'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -467,6 +473,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-17T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -503,6 +510,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -538,6 +546,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': 7, @@ -609,6 +618,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -644,6 +654,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': 3, 'days': None, @@ -715,6 +726,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -750,6 +762,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': 7, @@ -821,6 +834,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -856,6 +870,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T06:00:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -927,6 +942,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -962,6 +978,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-18T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1035,6 +1052,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1070,6 +1088,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1141,6 +1160,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1176,6 +1196,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 
'next_automatic_backup': '2024-11-17T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1251,6 +1272,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1290,6 +1312,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1365,6 +1388,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1400,6 +1424,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': 3, 'days': 7, @@ -1471,6 +1496,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1506,6 +1532,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1577,6 +1604,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1612,6 +1640,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': 3, 'days': None, @@ -1683,6 +1712,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1718,6 +1748,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': '2024-11-14T04:55:00+01:00', + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': 7, @@ -1789,6 +1820,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1823,6 +1855,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1857,6 +1890,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1891,6 +1925,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1925,6 +1960,7 @@ 'last_attempted_automatic_backup': None, 
'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1959,6 +1995,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -1993,6 +2030,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2027,6 +2065,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2061,6 +2100,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2095,6 +2135,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2129,6 +2170,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2163,6 +2205,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2197,6 +2240,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2231,6 +2275,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2265,6 +2310,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2299,6 +2345,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2333,6 +2380,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2367,6 +2415,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, 'retention': dict({ 'copies': None, 'days': None, @@ -2394,6 +2443,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2421,6 +2471,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': 
False, }), 'success': True, 'type': 'result', @@ -2464,6 +2515,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2491,6 +2543,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2534,6 +2587,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2588,6 +2642,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2626,6 +2681,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2675,6 +2731,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2719,6 +2776,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2773,6 +2831,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2828,6 +2887,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2884,6 +2944,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2938,6 +2999,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -2992,6 +3054,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3046,6 +3109,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3101,6 +3165,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3546,6 +3611,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3589,6 +3655,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ 
-3633,6 +3700,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3698,6 +3766,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', @@ -3742,6 +3811,7 @@ 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, 'next_automatic_backup': None, + 'next_automatic_backup_additional': False, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 4c7eaf634b3..48e6db4ae9a 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -27,7 +27,6 @@ from homeassistant.components.backup import ( DOMAIN, AgentBackup, BackupAgentPlatformProtocol, - BackupManager, BackupReaderWriterError, Folder, LocalBackupAgent, @@ -38,8 +37,6 @@ from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.components.backup.manager import ( BackupManagerError, BackupManagerState, - CoreBackupReaderWriter, - CreateBackupEvent, CreateBackupStage, CreateBackupState, NewBackup, @@ -140,23 +137,31 @@ async def test_async_create_backup( ) -async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: - """Test generate backup.""" - manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - manager.last_event = CreateBackupEvent( - stage=None, state=CreateBackupState.IN_PROGRESS +@pytest.mark.usefixtures("mock_backup_generation") +async def test_create_backup_when_busy( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test generate backup with busy manager.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": [LOCAL_AGENT_ID]} ) - with pytest.raises(HomeAssistantError, match="Backup manager busy"): - await manager.async_create_backup( - agent_ids=[LOCAL_AGENT_ID], - include_addons=[], - include_all_addons=False, - include_database=True, - include_folders=[], - include_homeassistant=True, - name=None, - password=None, - ) + result = await ws_client.receive_json() + + assert result["success"] is True + + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": [LOCAL_AGENT_ID]} + ) + result = await ws_client.receive_json() + + assert result["success"] is False + assert result["error"]["code"] == "home_assistant_error" + assert result["error"]["message"] == "Backup manager busy: create_backup" @pytest.mark.parametrize( @@ -223,10 +228,9 @@ async def test_create_backup_wrong_parameters( {"password": "pass123"}, ], ) -async def test_async_initiate_backup( +async def test_initiate_backup( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - caplog: pytest.LogCaptureFixture, mocked_json_bytes: Mock, mocked_tarfile: Mock, generate_backup_id: MagicMock, @@ -239,10 +243,7 @@ async def test_async_initiate_backup( """Test generate backup.""" local_agent = local_backup_platform.CoreLocalBackupAgent(hass) remote_agent = BackupAgentTest("remote", backups=[]) - agents = { - f"backup.{local_agent.name}": local_agent, - f"test.{remote_agent.name}": remote_agent, - } + with patch( 
"homeassistant.components.backup.backup.async_get_backup_agents" ) as core_get_backup_agents: @@ -275,6 +276,7 @@ async def test_async_initiate_backup( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -348,7 +350,7 @@ async def test_async_initiate_backup( }, "name": name, "protected": bool(password), - "slug": ANY, + "slug": backup_id, "type": "partial", "version": 2, } @@ -364,7 +366,7 @@ async def test_async_initiate_backup( assert backup_agent_ids == agent_ids assert backup_data == { "addons": [], - "backup_id": ANY, + "backup_id": backup_id, "database_included": include_database, "date": ANY, "failed_agent_ids": [], @@ -377,16 +379,6 @@ async def test_async_initiate_backup( "with_automatic_settings": False, } - for agent_id in agent_ids: - agent = agents[agent_id] - assert len(agent._backups) == 1 - agent_backup = agent._backups[backup_data["backup_id"]] - assert agent_backup.backup_id == backup_data["backup_id"] - assert agent_backup.date == backup_data["date"] - assert agent_backup.name == backup_data["name"] - assert agent_backup.protected == backup_data["protected"] - assert agent_backup.size == backup_data["size"] - outer_tar = mocked_tarfile.return_value core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value expected_files = [call(hass.config.path(), arcname="data", recursive=False)] + [ @@ -397,12 +389,12 @@ async def test_async_initiate_backup( tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) backup_directory = hass.config.path(backup_directory) - assert tar_file_path == f"{backup_directory}/{backup_data['backup_id']}.tar" + assert tar_file_path == f"{backup_directory}/{backup_id}.tar" @pytest.mark.usefixtures("mock_backup_generation") @pytest.mark.parametrize("exception", [BackupAgentError("Boom!"), Exception("Boom!")]) -async def test_async_initiate_backup_with_agent_error( +async def test_initiate_backup_with_agent_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, generate_backup_id: MagicMock, @@ -521,6 +513,7 @@ async def test_async_initiate_backup_with_agent_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id( @@ -616,6 +609,7 @@ async def test_async_initiate_backup_with_agent_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await hass.async_block_till_done() @@ -842,7 +836,7 @@ async def test_create_backup_failure_raises_issue( @pytest.mark.parametrize( "exception", [BackupReaderWriterError("Boom!"), BaseException("Boom!")] ) -async def test_async_initiate_backup_non_agent_upload_error( +async def test_initiate_backup_non_agent_upload_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, generate_backup_id: MagicMock, @@ -884,6 +878,7 @@ async def test_async_initiate_backup_non_agent_upload_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -950,7 +945,7 @@ async def test_async_initiate_backup_non_agent_upload_error( @pytest.mark.parametrize( "exception", 
[BackupReaderWriterError("Boom!"), Exception("Boom!")] ) -async def test_async_initiate_backup_with_task_error( +async def test_initiate_backup_with_task_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, generate_backup_id: MagicMock, @@ -995,6 +990,7 @@ async def test_async_initiate_backup_with_task_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -1100,6 +1096,7 @@ async def test_initiate_backup_file_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -1167,35 +1164,6 @@ async def test_initiate_backup_file_error( assert unlink_mock.call_count == unlink_call_count -async def test_loading_platforms( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test loading backup platforms.""" - manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - - assert not manager.platforms - - get_agents_mock = AsyncMock(return_value=[]) - - await setup_backup_platform( - hass, - domain="test", - platform=Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), - async_get_backup_agents=get_agents_mock, - ), - ) - await manager.load_platforms() - await hass.async_block_till_done() - - assert len(manager.platforms) == 1 - assert "Loaded 1 platforms" in caplog.text - - get_agents_mock.assert_called_once_with(hass) - - class LocalBackupAgentTest(BackupAgentTest, LocalBackupAgent): """Local backup agent.""" @@ -1621,6 +1589,7 @@ async def test_receive_backup_agent_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id( @@ -1699,6 +1668,7 @@ async def test_receive_backup_agent_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await hass.async_block_till_done() @@ -1760,6 +1730,7 @@ async def test_receive_backup_non_agent_upload_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -1881,6 +1852,7 @@ async def test_receive_backup_file_write_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -1990,6 +1962,7 @@ async def test_receive_backup_read_tar_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -2158,6 +2131,7 @@ async def test_receive_backup_file_read_error( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -2509,3 +2483,265 @@ async def test_restore_backup_wrong_parameters( 
mocked_write_text.assert_not_called() mocked_service_call.assert_not_called() + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_restore_backup_when_busy( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test restore backup with busy manager.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": [LOCAL_AGENT_ID]} + ) + result = await ws_client.receive_json() + + assert result["success"] is True + + await ws_client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": TEST_BACKUP_ABC123.backup_id, + "agent_id": LOCAL_AGENT_ID, + } + ) + result = await ws_client.receive_json() + + assert result["success"] is False + assert result["error"]["code"] == "home_assistant_error" + assert result["error"]["message"] == "Backup manager busy: create_backup" + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("exception", "error_code", "error_message"), + [ + (BackupAgentError("Boom!"), "home_assistant_error", "Boom!"), + (Exception("Boom!"), "unknown_error", "Unknown error"), + ], +) +async def test_restore_backup_agent_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + exception: Exception, + error_code: str, + error_message: str, +) -> None: + """Test restore backup with agent error.""" + remote_agent = BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123]) + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open"), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch.object( + remote_agent, "async_download_backup", side_effect=exception + ) as download_mock, + ): + await ws_client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": TEST_BACKUP_ABC123.backup_id, + "agent_id": remote_agent.agent_id, + } + ) + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.RESTORE_BACKUP, + "stage": None, + "state": RestoreBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.RESTORE_BACKUP, + "stage": None, + "state": RestoreBackupState.FAILED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert not result["success"] + assert result["error"]["code"] == error_code + assert result["error"]["message"] == error_message + + assert download_mock.call_count == 1 + assert mocked_write_text.call_count == 0 + assert mocked_service_call.call_count == 0 + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ( + "open_call_count", + "open_exception", + "write_call_count", + "write_exception", + 
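test_restore_backup_agent_error above asserts a backup/subscribe_events stream of restore_backup/in_progress, then restore_backup/failed, then idle. A small sketch that folds such a stream into an outcome, assuming the BackupManagerState and RestoreBackupState enum members serialize to the lowercase strings used below; `restore_failed` is an illustrative helper:

# Fold a restore event stream (as asserted above) into a pass/fail outcome.
def restore_failed(events: list[dict]) -> bool:
    """Return True if the restore event stream reported a failed state."""
    return any(
        event.get("manager_state") == "restore_backup"
        and event.get("state") == "failed"
        for event in events
    )


# Event stream mirroring the assertions in the test above (string values assumed).
events = [
    {"manager_state": "restore_backup", "stage": None, "state": "in_progress"},
    {"manager_state": "restore_backup", "stage": None, "state": "failed"},
    {"manager_state": "idle"},
]
print(restore_failed(events))  # True
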
"close_call_count", + "close_exception", + "write_text_call_count", + "write_text_exception", + "validate_password_call_count", + ), + [ + ( + 1, + OSError("Boom!"), + 0, + None, + 0, + None, + 0, + None, + 0, + ), + ( + 1, + None, + 1, + OSError("Boom!"), + 1, + None, + 0, + None, + 0, + ), + ( + 1, + None, + 1, + None, + 1, + OSError("Boom!"), + 0, + None, + 0, + ), + ( + 1, + None, + 1, + None, + 1, + None, + 1, + OSError("Boom!"), + 1, + ), + ], +) +async def test_restore_backup_file_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + open_call_count: int, + open_exception: list[Exception | None], + write_call_count: int, + write_exception: Exception | None, + close_call_count: int, + close_exception: list[Exception | None], + write_text_call_count: int, + write_text_exception: Exception | None, + validate_password_call_count: int, +) -> None: + """Test restore backup with file error.""" + remote_agent = BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123]) + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + open_mock = mock_open() + open_mock.side_effect = open_exception + open_mock.return_value.write.side_effect = write_exception + open_mock.return_value.close.side_effect = close_exception + + with ( + patch("pathlib.Path.open", open_mock), + patch( + "pathlib.Path.write_text", side_effect=write_text_exception + ) as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch( + "homeassistant.components.backup.manager.validate_password" + ) as validate_password_mock, + patch.object(remote_agent, "async_download_backup") as download_mock, + ): + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + await ws_client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": TEST_BACKUP_ABC123.backup_id, + "agent_id": remote_agent.agent_id, + } + ) + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.RESTORE_BACKUP, + "stage": None, + "state": RestoreBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.RESTORE_BACKUP, + "stage": None, + "state": RestoreBackupState.FAILED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert not result["success"] + assert result["error"]["code"] == "unknown_error" + assert result["error"]["message"] == "Unknown error" + + assert download_mock.call_count == 1 + assert validate_password_mock.call_count == validate_password_call_count + assert open_mock.call_count == open_call_count + assert open_mock.return_value.write.call_count == write_call_count + assert open_mock.return_value.close.call_count == close_call_count + assert mocked_write_text.call_count == write_text_call_count + assert mocked_service_call.call_count == 0 diff --git 
a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 44a470053a5..52c04474162 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1310,6 +1310,7 @@ async def test_config_update_errors( "attempted_backup_time", "completed_backup_time", "scheduled_backup_time", + "additional_backup", "backup_calls_1", "backup_calls_2", "call_args", @@ -1325,6 +1326,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1345,6 +1347,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1364,6 +1367,7 @@ async def test_config_update_errors( "2024-11-18T04:55:00+01:00", "2024-11-18T04:55:00+01:00", "2024-11-18T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1387,6 +1391,7 @@ async def test_config_update_errors( "2024-11-18T03:45:00+01:00", "2024-11-18T03:45:00+01:00", "2024-11-18T03:45:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1406,6 +1411,7 @@ async def test_config_update_errors( "2024-11-12T03:45:00+01:00", "2024-11-12T03:45:00+01:00", "2024-11-12T03:45:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1425,6 +1431,7 @@ async def test_config_update_errors( "2024-11-13T04:55:00+01:00", "2024-11-13T04:55:00+01:00", "2024-11-13T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1444,6 +1451,7 @@ async def test_config_update_errors( "2024-11-11T04:45:00+01:00", "2024-11-11T04:45:00+01:00", None, + False, 0, 0, None, @@ -1463,6 +1471,7 @@ async def test_config_update_errors( "2024-11-11T04:45:00+01:00", "2024-11-11T04:45:00+01:00", None, + False, 0, 0, None, @@ -1482,6 +1491,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", "2024-11-12T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1501,6 +1511,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", # missed event uses daily schedule once "2024-11-12T04:55:00+01:00", # missed event uses daily schedule once "2024-11-12T04:55:00+01:00", + True, 1, 1, BACKUP_CALL, @@ -1520,6 +1531,7 @@ async def test_config_update_errors( "2024-10-26T04:45:00+01:00", "2024-10-26T04:45:00+01:00", None, + False, 0, 0, None, @@ -1539,6 +1551,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", # attempted to create backup but failed "2024-11-11T04:45:00+01:00", "2024-11-12T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1558,6 +1571,7 @@ async def test_config_update_errors( "2024-11-12T04:55:00+01:00", # attempted to create backup but failed "2024-11-11T04:45:00+01:00", "2024-11-12T04:55:00+01:00", + False, 1, 2, BACKUP_CALL, @@ -1579,6 +1593,7 @@ async def test_config_schedule_logic( attempted_backup_time: str, completed_backup_time: str, scheduled_backup_time: str, + additional_backup: bool, backup_calls_1: int, backup_calls_2: int, call_args: Any, @@ -1630,6 +1645,7 @@ async def test_config_schedule_logic( await client.send_json_auto_id({"type": "backup/info"}) result = await client.receive_json() assert result["result"]["next_automatic_backup"] == scheduled_backup_time + assert result["result"]["next_automatic_backup_additional"] == additional_backup freezer.move_to(time_1) async_fire_time_changed(hass) diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 112e71ec2db..db742525a48 100644 --- a/tests/components/cloud/test_backup.py +++ 
b/tests/components/cloud/test_backup.py @@ -205,6 +205,7 @@ async def test_agents_list_backups_fail_cloud( "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, "next_automatic_backup": None, + "next_automatic_backup_additional": False, } diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 80a056a6ea0..54aa30b3fcf 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -11,7 +11,7 @@ import pytest from syrupy import SnapshotAssertion import yaml -from homeassistant.components import conversation, cover, media_player +from homeassistant.components import conversation, cover, media_player, weather from homeassistant.components.conversation import default_agent from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.conversation.default_agent import METADATA_CUSTOM_SENTENCE @@ -3152,3 +3152,29 @@ async def test_handle_intents_with_response_errors( assert response is not None and response.error_code == error_code else: assert response is None + + +@pytest.mark.usefixtures("init_components") +async def test_state_names_are_not_translated( + hass: HomeAssistant, + init_components: None, +) -> None: + """Test that state names are not translated in responses.""" + await async_setup_component(hass, "weather", {}) + + hass.states.async_set("weather.test_weather", weather.ATTR_CONDITION_PARTLYCLOUDY) + expose_entity(hass, "weather.test_weather", True) + + with patch( + "homeassistant.helpers.template.Template.async_render" + ) as mock_async_render: + result = await conversation.async_converse( + hass, "what is the weather like?", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER + mock_async_render.assert_called_once() + + assert ( + mock_async_render.call_args.args[0]["state"].state + == weather.ATTR_CONDITION_PARTLYCLOUDY + ) diff --git a/tests/components/coolmaster/test_init.py b/tests/components/coolmaster/test_init.py index 4a90d0d9276..f8ff761517f 100644 --- a/tests/components/coolmaster/test_init.py +++ b/tests/components/coolmaster/test_init.py @@ -1,6 +1,5 @@ """The test for the Coolmaster integration.""" -from homeassistant.components.coolmaster.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant @@ -20,8 +19,6 @@ async def test_unload_entry( load_int: ConfigEntry, ) -> None: """Test Coolmaster unloading an entry.""" - assert load_int.entry_id in hass.data.get(DOMAIN) await hass.config_entries.async_unload(load_int.entry_id) await hass.async_block_till_done() assert load_int.state is ConfigEntryState.NOT_LOADED - assert not hass.data.get(DOMAIN) diff --git a/tests/components/directv/test_init.py b/tests/components/directv/test_init.py index 4bfe8e2121f..102c338e757 100644 --- a/tests/components/directv/test_init.py +++ b/tests/components/directv/test_init.py @@ -1,6 +1,5 @@ """Tests for the DirecTV integration.""" -from homeassistant.components.directv.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -24,11 +23,9 @@ async def test_unload_config_entry( """Test the DirecTV configuration entry unloading.""" entry = await setup_integration(hass, aioclient_mock) - assert entry.entry_id in hass.data[DOMAIN] assert entry.state is ConfigEntryState.LOADED await 
hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert entry.entry_id not in hass.data[DOMAIN] assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/duke_energy/conftest.py b/tests/components/duke_energy/conftest.py index ed4182f450f..f74ef43bf07 100644 --- a/tests/components/duke_energy/conftest.py +++ b/tests/components/duke_energy/conftest.py @@ -11,12 +11,12 @@ from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr index f936a9db76e..e9bf8378d79 100644 --- a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr @@ -255,7 +255,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:transmission-tower', + 'original_icon': None, 'original_name': 'Grid status', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -269,7 +269,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Enpower 654321 Grid status', - 'icon': 'mdi:transmission-tower', }), 'context': , 'entity_id': 'binary_sensor.enpower_654321_grid_status', diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 76835098f27..4254ffe961a 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -88,7 +88,7 @@ }), }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -101,7 +101,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Envoy <> Current power production', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'kW', }), @@ -140,7 +139,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -153,7 +152,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production today', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'kWh', }), @@ -190,7 +188,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -203,7 +201,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': 'kWh', }), 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', @@ -241,7 +238,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -254,7 +251,6 @@ 
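The coolmaster and directv test diffs above drop their assertions on hass.data[DOMAIN], which suggests per-entry state no longer lives there. A common replacement in recent Home Assistant cores is `entry.runtime_data`; the sketch below assumes that pattern, and `CoolmasterData` with its contents is hypothetical rather than taken from either integration:

# Illustrative entry.runtime_data pattern (assumed, not shown in this diff).
from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class CoolmasterData:
    """Hypothetical per-entry state."""

    host: str


CoolmasterConfigEntry = ConfigEntry[CoolmasterData]


async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -> bool:
    """Attach state to the entry itself; unload then needs no hass.data cleanup."""
    entry.runtime_data = CoolmasterData(host=entry.data["host"])
    return True
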
'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'MWh', }), @@ -321,7 +317,7 @@ 'options': dict({ }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -334,7 +330,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'W', }), @@ -365,7 +360,7 @@ 'options': dict({ }), 'original_device_class': 'timestamp', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -522,7 +517,7 @@ }), }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -535,7 +530,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Envoy <> Current power production', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'kW', }), @@ -574,7 +568,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -587,7 +581,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production today', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'kWh', }), @@ -624,7 +617,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -637,7 +630,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': 'kWh', }), 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', @@ -675,7 +667,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -688,7 +680,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'MWh', }), @@ -755,7 +746,7 @@ 'options': dict({ }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -768,7 +759,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'W', }), @@ -799,7 +789,7 @@ 'options': dict({ }), 'original_device_class': 'timestamp', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -996,7 +986,7 @@ }), }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1009,7 +999,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Envoy <> Current power 
production', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'kW', }), @@ -1048,7 +1037,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1061,7 +1050,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production today', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'kWh', }), @@ -1098,7 +1086,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1111,7 +1099,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': 'kWh', }), 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', @@ -1149,7 +1136,7 @@ }), }), 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1162,7 +1149,6 @@ 'attributes': dict({ 'device_class': 'energy', 'friendly_name': 'Envoy <> Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': 'total_increasing', 'unit_of_measurement': 'MWh', }), @@ -1229,7 +1215,7 @@ 'options': dict({ }), 'original_device_class': 'power', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1242,7 +1228,6 @@ 'attributes': dict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': 'measurement', 'unit_of_measurement': 'W', }), @@ -1273,7 +1258,7 @@ 'options': dict({ }), 'original_device_class': 'timestamp', - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index d6a523a3e15..0f251b5e859 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -30,7 +30,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -45,7 +45,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -86,7 +85,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -101,7 +100,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -143,7 +141,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -158,7 +156,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy 
production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -201,7 +198,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -216,7 +213,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -253,7 +249,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -268,7 +264,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -303,7 +298,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -318,7 +313,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -359,7 +353,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -374,7 +368,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -417,7 +410,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -432,7 +425,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -475,7 +467,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -490,7 +482,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -533,7 +524,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -548,7 +539,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -589,7 +579,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -604,7 +594,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -646,7 +635,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 
'Energy consumption today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -661,7 +650,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -702,7 +690,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -717,7 +705,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -759,7 +746,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -774,7 +761,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -814,7 +800,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -829,7 +815,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -869,7 +854,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -884,7 +869,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -927,7 +911,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -942,7 +926,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -985,7 +968,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1000,7 +983,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1043,7 +1025,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1058,7 +1040,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1101,7 +1082,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1116,7 +1097,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 
'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1159,7 +1139,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1174,7 +1154,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1197,7 +1176,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -1209,7 +1188,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1223,7 +1202,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', @@ -1244,7 +1222,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -1256,7 +1234,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1270,7 +1248,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -1297,7 +1274,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -1309,7 +1286,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1324,7 +1301,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -1356,7 +1332,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -1368,7 +1344,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1383,7 +1359,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -1429,7 +1404,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT 
current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1444,7 +1419,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1484,7 +1458,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1499,7 +1473,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -1538,7 +1511,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1553,7 +1526,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -1595,7 +1567,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1610,7 +1582,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1653,7 +1624,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1668,7 +1639,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1711,7 +1681,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1726,7 +1696,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1763,7 +1732,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1778,7 +1747,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -1813,7 +1781,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -1828,7 +1796,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -2256,7 +2223,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Aggregated available battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2271,7 +2238,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy_storage', 'friendly_name': 'Envoy 1234 Aggregated available 
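The enphase_envoy snapshot hunks in this stretch replace the hard-coded `mdi:flash` icon with None and move the CT status sensors into the diagnostic entity category. A minimal sketch of an entity description consistent with those snapshots; the key and translation_key are assumptions, not copied from the integration:

# Illustrative SensorEntityDescription matching the snapshot changes: no explicit
# icon (device_class defaults apply) and diagnostic entity category for CT status.
from homeassistant.components.sensor import SensorDeviceClass, SensorEntityDescription
from homeassistant.const import EntityCategory

NET_CONSUMPTION_CT_STATUS = SensorEntityDescription(
    key="net_consumption_ct_metering_status",
    translation_key="net_consumption_ct_metering_status",
    device_class=SensorDeviceClass.ENUM,
    entity_category=EntityCategory.DIAGNOSTIC,
    # no icon= here; the hard-coded "mdi:flash" from the old snapshots is gone
)
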
battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -2305,7 +2271,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Aggregated Battery capacity', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2320,7 +2286,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy_storage', 'friendly_name': 'Envoy 1234 Aggregated Battery capacity', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -2354,7 +2319,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Aggregated battery soc', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2369,7 +2334,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Aggregated battery soc', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -2403,7 +2367,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Available ACB battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2418,7 +2382,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy_storage', 'friendly_name': 'Envoy 1234 Available ACB battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -2452,7 +2415,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Available battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2467,7 +2430,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Available battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -2509,7 +2471,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2524,7 +2486,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2559,7 +2520,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2574,7 +2535,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Battery', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -2608,7 +2568,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery capacity', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2623,7 +2583,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Battery capacity', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -2665,7 +2624,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2680,7 +2639,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2723,7 +2681,7 @@ }), }), 'original_device_class': , - 
'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2738,7 +2696,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2781,7 +2738,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2796,7 +2753,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2839,7 +2795,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2854,7 +2810,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2897,7 +2852,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2912,7 +2867,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -2955,7 +2909,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -2970,7 +2924,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3011,7 +2964,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3026,7 +2979,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -3068,7 +3020,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3083,7 +3035,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3124,7 +3075,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3139,7 +3090,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -3181,7 +3131,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 
'previous_unique_id': None, @@ -3196,7 +3146,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3236,7 +3185,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3251,7 +3200,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3291,7 +3239,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3306,7 +3254,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3346,7 +3293,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3361,7 +3308,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3401,7 +3347,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3416,7 +3362,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3456,7 +3401,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3471,7 +3416,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3511,7 +3455,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3526,7 +3470,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3566,7 +3509,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3581,7 +3524,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3621,7 +3563,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3636,7 +3578,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 
1234 Frequency production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3679,7 +3620,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3694,7 +3635,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3737,7 +3677,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3752,7 +3692,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3795,7 +3734,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3810,7 +3749,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3853,7 +3791,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3868,7 +3806,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3911,7 +3848,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3926,7 +3863,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -3969,7 +3905,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -3984,7 +3920,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -4027,7 +3962,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4042,7 +3977,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -4085,7 +4019,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4100,7 +4034,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', 'state_class': , 
'unit_of_measurement': , }), @@ -4143,7 +4076,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4158,7 +4091,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -4201,7 +4133,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4216,7 +4148,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -4259,7 +4190,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4274,7 +4205,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -4297,7 +4227,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -4309,7 +4239,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4323,7 +4253,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', @@ -4344,7 +4273,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -4356,7 +4285,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4370,7 +4299,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', @@ -4391,7 +4319,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -4403,7 +4331,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4417,7 +4345,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 
'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', @@ -4438,7 +4365,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -4450,7 +4377,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4464,7 +4391,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', @@ -4485,7 +4411,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -4497,7 +4423,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4511,7 +4437,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -4532,7 +4457,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -4544,7 +4469,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4558,7 +4483,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', @@ -4579,7 +4503,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -4591,7 +4515,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4605,7 +4529,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', @@ -4626,7 +4549,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -4638,7 +4561,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 
'Meter status flags active production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4652,7 +4575,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', @@ -4679,7 +4601,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -4691,7 +4613,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4706,7 +4628,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -4738,7 +4659,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -4750,7 +4671,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4765,7 +4686,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -4797,7 +4717,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -4809,7 +4729,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4824,7 +4744,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -4856,7 +4775,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -4868,7 +4787,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4883,7 +4802,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -4915,7 +4833,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -4927,7 +4845,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -4942,7 +4860,6 @@ 
'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -4974,7 +4891,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -4986,7 +4903,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5001,7 +4918,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -5033,7 +4949,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -5045,7 +4961,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5060,7 +4976,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -5092,7 +5007,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -5104,7 +5019,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5119,7 +5034,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -5165,7 +5079,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5180,7 +5094,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5223,7 +5136,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5238,7 +5151,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5281,7 +5193,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5296,7 +5208,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5339,7 +5250,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 
'Net consumption CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5354,7 +5265,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5394,7 +5304,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5409,7 +5319,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5448,7 +5357,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5463,7 +5372,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5502,7 +5410,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5517,7 +5425,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5556,7 +5463,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5571,7 +5478,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5610,7 +5516,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5625,7 +5531,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5664,7 +5569,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5679,7 +5584,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5718,7 +5622,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5733,7 +5637,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5772,7 +5675,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5787,7 +5690,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 
'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -5829,7 +5731,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5844,7 +5746,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5887,7 +5788,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5902,7 +5803,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -5945,7 +5845,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -5960,7 +5860,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6003,7 +5902,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6018,7 +5917,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6053,7 +5951,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6068,7 +5966,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Reserve battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -6102,7 +5999,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery level', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6117,7 +6014,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Reserve battery level', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -6159,7 +6055,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6174,7 +6070,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6217,7 +6112,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6232,7 +6127,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6275,7 +6169,7 @@ }), }), 
'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6290,7 +6184,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6333,7 +6226,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6348,7 +6241,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6391,7 +6283,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6406,7 +6298,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6449,7 +6340,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6464,7 +6355,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6507,7 +6397,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6522,7 +6412,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6565,7 +6454,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6580,7 +6469,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6617,7 +6505,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6632,7 +6520,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -6667,7 +6554,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6682,7 +6569,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -6954,7 +6840,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Available battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -6969,7 
+6855,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Available battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -7011,7 +6896,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7026,7 +6911,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7061,7 +6945,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7076,7 +6960,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Battery', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -7110,7 +6993,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery capacity', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7125,7 +7008,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Battery capacity', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -7167,7 +7049,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7182,7 +7064,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7225,7 +7106,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7240,7 +7121,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7283,7 +7163,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7298,7 +7178,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7341,7 +7220,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7356,7 +7235,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7399,7 +7277,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7414,7 +7292,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', 'state_class': , 
'unit_of_measurement': , }), @@ -7457,7 +7334,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7472,7 +7349,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7513,7 +7389,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7528,7 +7404,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -7570,7 +7445,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7585,7 +7460,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7626,7 +7500,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7641,7 +7515,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -7683,7 +7556,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7698,7 +7571,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7738,7 +7610,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7753,7 +7625,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7793,7 +7664,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7808,7 +7679,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7848,7 +7718,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7863,7 +7733,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7903,7 +7772,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': 
None, 'original_name': 'Frequency net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7918,7 +7787,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -7958,7 +7826,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -7973,7 +7841,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8013,7 +7880,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8028,7 +7895,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8068,7 +7934,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8083,7 +7949,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8123,7 +7988,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8138,7 +8003,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8181,7 +8045,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8196,7 +8060,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8239,7 +8102,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8254,7 +8117,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8297,7 +8159,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8312,7 +8174,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8355,7 +8216,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8370,7 
+8231,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8413,7 +8273,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8428,7 +8288,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8471,7 +8330,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8486,7 +8345,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8529,7 +8387,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8544,7 +8402,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8587,7 +8444,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8602,7 +8459,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8645,7 +8501,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8660,7 +8516,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8703,7 +8558,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8718,7 +8573,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8761,7 +8615,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8776,7 +8630,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -8799,7 +8652,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -8811,7 +8664,7 @@ 'options': dict({ }), 
'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8825,7 +8678,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', @@ -8846,7 +8698,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -8858,7 +8710,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8872,7 +8724,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', @@ -8893,7 +8744,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -8905,7 +8756,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8919,7 +8770,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', @@ -8940,7 +8790,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -8952,7 +8802,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -8966,7 +8816,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', @@ -8987,7 +8836,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -8999,7 +8848,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9013,7 +8862,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -9034,7 +8882,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -9046,7 +8894,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9060,7 +8908,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', @@ -9081,7 +8928,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -9093,7 +8940,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9107,7 +8954,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', @@ -9128,7 +8974,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -9140,7 +8986,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9154,7 +9000,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', @@ -9181,7 +9026,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -9193,7 +9038,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9208,7 +9053,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9240,7 +9084,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -9252,7 +9096,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9267,7 +9111,6 @@ 'attributes': ReadOnlyDict({ 
'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9299,7 +9142,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -9311,7 +9154,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9326,7 +9169,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9358,7 +9200,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -9370,7 +9212,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9385,7 +9227,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9417,7 +9258,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -9429,7 +9270,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9444,7 +9285,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9476,7 +9316,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -9488,7 +9328,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9503,7 +9343,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9535,7 +9374,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -9547,7 +9386,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9562,7 +9401,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9594,7 +9432,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 
'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -9606,7 +9444,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9621,7 +9459,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -9667,7 +9504,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9682,7 +9519,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -9725,7 +9561,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9740,7 +9576,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -9783,7 +9618,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9798,7 +9633,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -9841,7 +9675,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9856,7 +9690,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -9896,7 +9729,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9911,7 +9744,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -9950,7 +9782,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -9965,7 +9797,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10004,7 +9835,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10019,7 +9850,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', 
'state_class': , }), 'context': , @@ -10058,7 +9888,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10073,7 +9903,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10112,7 +9941,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10127,7 +9956,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10166,7 +9994,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10181,7 +10009,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10220,7 +10047,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10235,7 +10062,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10274,7 +10100,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10289,7 +10115,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -10331,7 +10156,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10346,7 +10171,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10389,7 +10213,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10404,7 +10228,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10447,7 +10270,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10462,7 +10285,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10505,7 +10327,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 
'Production CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10520,7 +10342,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10555,7 +10376,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10570,7 +10391,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Reserve battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -10604,7 +10424,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery level', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10619,7 +10439,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Reserve battery level', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -10661,7 +10480,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10676,7 +10495,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10719,7 +10537,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10734,7 +10552,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10777,7 +10594,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10792,7 +10609,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10835,7 +10651,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10850,7 +10666,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10893,7 +10708,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10908,7 +10723,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -10951,7 +10765,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -10966,7 +10780,6 @@ 'attributes': 
ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11009,7 +10822,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11024,7 +10837,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11067,7 +10879,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11082,7 +10894,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11119,7 +10930,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11134,7 +10945,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11169,7 +10979,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11184,7 +10994,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -11551,7 +11360,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Available battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11566,7 +11375,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Available battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -11608,7 +11416,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11623,7 +11431,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11666,7 +11473,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11681,7 +11488,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11724,7 +11530,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11739,7 +11545,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 
'unit_of_measurement': , }), @@ -11782,7 +11587,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11797,7 +11602,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11832,7 +11636,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11847,7 +11651,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Battery', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -11881,7 +11684,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Battery capacity', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11896,7 +11699,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Battery capacity', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -11938,7 +11740,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current battery discharge', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -11953,7 +11755,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current battery discharge', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -11996,7 +11797,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current battery discharge l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12011,7 +11812,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current battery discharge l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12054,7 +11854,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current battery discharge l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12069,7 +11869,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current battery discharge l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12112,7 +11911,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current battery discharge l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12127,7 +11926,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current battery discharge l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12170,7 +11968,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12185,7 +11983,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12228,7 +12025,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 
'original_name': 'Current net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12243,7 +12040,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12286,7 +12082,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12301,7 +12097,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12344,7 +12139,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12359,7 +12154,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12402,7 +12196,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12417,7 +12211,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12460,7 +12253,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12475,7 +12268,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12518,7 +12310,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12533,7 +12325,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12576,7 +12367,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12591,7 +12382,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12634,7 +12424,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12649,7 +12439,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12692,7 +12481,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ 
-12707,7 +12496,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12750,7 +12538,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12765,7 +12553,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12808,7 +12595,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12823,7 +12610,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -12864,7 +12650,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12879,7 +12665,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -12919,7 +12704,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12934,7 +12719,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -12974,7 +12758,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -12989,7 +12773,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13029,7 +12812,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13044,7 +12827,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13086,7 +12868,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13101,7 +12883,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13144,7 +12925,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13159,7 +12940,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 
'friendly_name': 'Envoy 1234 Energy consumption today l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13202,7 +12982,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13217,7 +12997,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13260,7 +13039,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13275,7 +13054,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13316,7 +13094,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13331,7 +13109,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13371,7 +13148,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13386,7 +13163,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l1', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13426,7 +13202,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13441,7 +13217,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l2', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13481,7 +13256,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13496,7 +13271,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l3', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -13538,7 +13312,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13553,7 +13327,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13596,7 +13369,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13611,7 +13384,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l1', - 'icon': 'mdi:flash', 
'state_class': , 'unit_of_measurement': , }), @@ -13654,7 +13426,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13669,7 +13441,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13712,7 +13483,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13727,7 +13498,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13767,7 +13537,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13782,7 +13552,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13822,7 +13591,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13837,7 +13606,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13877,7 +13645,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13892,7 +13660,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13932,7 +13699,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -13947,7 +13714,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -13987,7 +13753,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14002,7 +13768,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14042,7 +13807,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14057,7 +13822,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14097,7 +13861,7 @@ }), }), 
'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14112,7 +13876,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14152,7 +13915,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14167,7 +13930,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14207,7 +13969,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14222,7 +13984,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency storage CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14262,7 +14023,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14277,7 +14038,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency storage CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14317,7 +14077,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14332,7 +14092,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency storage CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14372,7 +14131,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14387,7 +14146,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency storage CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14430,7 +14188,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14445,7 +14203,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14488,7 +14245,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14503,7 +14260,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14546,7 +14302,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 
'original_name': 'Lifetime balanced net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14561,7 +14317,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14604,7 +14359,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14619,7 +14374,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14662,7 +14416,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy charged', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14677,7 +14431,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy charged', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14720,7 +14473,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy charged l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14735,7 +14488,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14778,7 +14530,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy charged l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14793,7 +14545,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14836,7 +14587,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy charged l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14851,7 +14602,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14894,7 +14644,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy discharged', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14909,7 +14659,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -14952,7 +14701,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy discharged l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -14967,7 +14716,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15010,7 +14758,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 
'original_icon': None, 'original_name': 'Lifetime battery energy discharged l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15025,7 +14773,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15068,7 +14815,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime battery energy discharged l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15083,7 +14830,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15126,7 +14872,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15141,7 +14887,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15184,7 +14929,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15199,7 +14944,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15242,7 +14986,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15257,7 +15001,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15300,7 +15043,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15315,7 +15058,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15358,7 +15100,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15373,7 +15115,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15416,7 +15157,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15431,7 +15172,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15474,7 +15214,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production 
l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15489,7 +15229,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15532,7 +15271,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15547,7 +15286,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15590,7 +15328,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15605,7 +15343,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15648,7 +15385,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15663,7 +15400,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15706,7 +15442,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15721,7 +15457,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15764,7 +15499,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15779,7 +15514,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15822,7 +15556,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15837,7 +15571,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15880,7 +15613,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -15895,7 +15628,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15938,7 +15670,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l2', 'platform': 'enphase_envoy', 
'previous_unique_id': None, @@ -15953,7 +15685,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -15996,7 +15727,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16011,7 +15742,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -16034,7 +15764,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -16046,7 +15776,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16060,7 +15790,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', @@ -16081,7 +15810,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -16093,7 +15822,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16107,7 +15836,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', @@ -16128,7 +15856,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -16140,7 +15868,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16154,7 +15882,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', @@ -16175,7 +15902,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -16187,7 +15914,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16201,7 
+15928,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', @@ -16222,7 +15948,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -16234,7 +15960,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16248,7 +15974,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -16269,7 +15994,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -16281,7 +16006,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16295,7 +16020,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', @@ -16316,7 +16040,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -16328,7 +16052,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16342,7 +16066,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', @@ -16363,7 +16086,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -16375,7 +16098,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16389,7 +16112,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', @@ -16410,7 +16132,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', 'has_entity_name': True, 'hidden_by': None, @@ 
-16422,7 +16144,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16436,7 +16158,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active storage CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', @@ -16457,7 +16178,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -16469,7 +16190,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16483,7 +16204,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', @@ -16504,7 +16224,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -16516,7 +16236,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16530,7 +16250,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', @@ -16551,7 +16270,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -16563,7 +16282,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16577,7 +16296,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', @@ -16604,7 +16322,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -16616,7 +16334,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16631,7 +16349,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16663,7 +16380,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 
'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -16675,7 +16392,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16690,7 +16407,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16722,7 +16438,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -16734,7 +16450,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16749,7 +16465,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16781,7 +16496,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -16793,7 +16508,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16808,7 +16523,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16840,7 +16554,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -16852,7 +16566,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16867,7 +16581,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16899,7 +16612,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -16911,7 +16624,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16926,7 +16639,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -16958,7 +16670,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -16970,7 +16682,7 @@ 'options': dict({ }), 'original_device_class': , 
- 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -16985,7 +16697,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17017,7 +16728,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -17029,7 +16740,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17044,7 +16755,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17076,7 +16786,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', 'has_entity_name': True, 'hidden_by': None, @@ -17088,7 +16798,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17103,7 +16813,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status storage CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17135,7 +16844,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -17147,7 +16856,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17162,7 +16871,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status storage CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17194,7 +16902,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -17206,7 +16914,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17221,7 +16929,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status storage CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17253,7 +16960,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -17265,7 +16972,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17280,7 +16987,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 
'Envoy 1234 Metering status storage CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -17326,7 +17032,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17341,7 +17047,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -17384,7 +17089,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17399,7 +17104,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -17442,7 +17146,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17457,7 +17161,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -17500,7 +17203,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17515,7 +17218,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -17555,7 +17257,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17570,7 +17272,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17609,7 +17310,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17624,7 +17325,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17663,7 +17363,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17678,7 +17378,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17717,7 +17416,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17732,7 +17431,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ 
-17771,7 +17469,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17786,7 +17484,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17825,7 +17522,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17840,7 +17537,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17879,7 +17575,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17894,7 +17590,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17933,7 +17628,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -17948,7 +17643,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -17987,7 +17681,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18002,7 +17696,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor storage CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -18041,7 +17734,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18056,7 +17749,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -18095,7 +17787,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18110,7 +17802,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -18149,7 +17840,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18164,7 +17855,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -18206,7 +17896,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 
'previous_unique_id': None, @@ -18221,7 +17911,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18264,7 +17953,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18279,7 +17968,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18322,7 +18010,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18337,7 +18025,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18380,7 +18067,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18395,7 +18082,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18430,7 +18116,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery energy', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18445,7 +18131,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Reserve battery energy', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -18479,7 +18164,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Reserve battery level', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18494,7 +18179,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Envoy 1234 Reserve battery level', - 'icon': 'mdi:flash', 'unit_of_measurement': '%', }), 'context': , @@ -18536,7 +18220,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Storage CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18551,7 +18235,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Storage CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18594,7 +18277,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Storage CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18609,7 +18292,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Storage CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18652,7 +18334,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Storage CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18667,7 +18349,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Storage CT current l2', - 'icon': 
'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18710,7 +18391,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Storage CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18725,7 +18406,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Storage CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18768,7 +18448,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18783,7 +18463,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18826,7 +18505,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18841,7 +18520,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18884,7 +18562,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18899,7 +18577,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -18942,7 +18619,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -18957,7 +18634,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19000,7 +18676,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19015,7 +18691,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19058,7 +18733,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19073,7 +18748,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19116,7 +18790,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19131,7 +18805,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19174,7 +18847,7 @@ }), }), 'original_device_class': , - 'original_icon': 
'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19189,7 +18862,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19232,7 +18904,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19247,7 +18919,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage storage CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19290,7 +18961,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19305,7 +18976,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage storage CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19348,7 +19018,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19363,7 +19033,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage storage CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19406,7 +19075,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19421,7 +19090,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage storage CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19458,7 +19126,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19473,7 +19141,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19508,7 +19175,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19523,7 +19190,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -19564,7 +19230,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19579,7 +19245,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19622,7 +19287,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19637,7 +19302,6 @@ 'attributes': ReadOnlyDict({ 
'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19680,7 +19344,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19695,7 +19359,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19738,7 +19401,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19753,7 +19416,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19796,7 +19458,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19811,7 +19473,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19854,7 +19515,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19869,7 +19530,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19912,7 +19572,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19927,7 +19587,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -19970,7 +19629,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -19985,7 +19644,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current net power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20028,7 +19686,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20043,7 +19701,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20086,7 +19743,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20101,7 +19758,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power 
consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20144,7 +19800,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20159,7 +19815,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20202,7 +19857,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20217,7 +19872,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20260,7 +19914,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20275,7 +19929,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20318,7 +19971,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20333,7 +19986,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20376,7 +20028,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20391,7 +20043,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20434,7 +20085,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20449,7 +20100,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20490,7 +20140,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20505,7 +20155,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -20545,7 +20194,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20560,7 +20209,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -20600,7 +20248,7 @@ 
}), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20615,7 +20263,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -20655,7 +20302,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption last seven days l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20670,7 +20317,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -20712,7 +20358,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20727,7 +20373,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20770,7 +20415,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20785,7 +20430,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20828,7 +20472,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20843,7 +20487,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20886,7 +20529,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy consumption today l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20901,7 +20544,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy consumption today l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -20942,7 +20584,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -20957,7 +20599,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -20997,7 +20638,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21012,7 +20653,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l1', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -21052,7 +20692,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 
'original_icon': None, 'original_name': 'Energy production last seven days l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21067,7 +20707,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l2', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -21107,7 +20746,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21122,7 +20761,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days l3', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -21164,7 +20802,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21179,7 +20817,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21222,7 +20859,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21237,7 +20874,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21280,7 +20916,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21295,7 +20931,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21338,7 +20973,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21353,7 +20988,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21393,7 +21027,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21408,7 +21042,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21448,7 +21081,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21463,7 +21096,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21503,7 +21135,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l2', 'platform': 
'enphase_envoy', 'previous_unique_id': None, @@ -21518,7 +21150,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21558,7 +21189,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21573,7 +21204,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21613,7 +21243,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21628,7 +21258,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21668,7 +21297,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21683,7 +21312,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21723,7 +21351,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21738,7 +21366,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21778,7 +21405,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21793,7 +21420,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21836,7 +21462,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21851,7 +21477,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21894,7 +21519,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -21909,7 +21534,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -21952,7 +21576,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': 
None, @@ -21967,7 +21591,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22010,7 +21633,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22025,7 +21648,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22068,7 +21690,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22083,7 +21705,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22126,7 +21747,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22141,7 +21762,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22184,7 +21804,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22199,7 +21819,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22242,7 +21861,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22257,7 +21876,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22300,7 +21918,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22315,7 +21933,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22358,7 +21975,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22373,7 +21990,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22416,7 +22032,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22431,7 +22047,6 @@ 'attributes': ReadOnlyDict({ 
'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22474,7 +22089,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22489,7 +22104,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22532,7 +22146,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22547,7 +22161,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22590,7 +22203,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22605,7 +22218,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22648,7 +22260,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22663,7 +22275,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22706,7 +22317,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22721,7 +22332,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22764,7 +22374,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22779,7 +22389,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22822,7 +22431,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22837,7 +22446,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22880,7 +22488,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22895,7 +22503,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 
1234 Lifetime net energy production l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22938,7 +22545,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime net energy production l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -22953,7 +22560,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -22976,7 +22582,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -22988,7 +22594,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23002,7 +22608,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', @@ -23023,7 +22628,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -23035,7 +22640,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23049,7 +22654,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', @@ -23070,7 +22674,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -23082,7 +22686,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23096,7 +22700,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', @@ -23117,7 +22720,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -23129,7 +22732,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23143,7 +22746,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT 
l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', @@ -23164,7 +22766,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -23176,7 +22778,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23190,7 +22792,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -23211,7 +22812,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -23223,7 +22824,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23237,7 +22838,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', @@ -23258,7 +22858,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -23270,7 +22870,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23284,7 +22884,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', @@ -23305,7 +22904,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -23317,7 +22916,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23331,7 +22930,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', @@ -23358,7 +22956,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, @@ -23370,7 +22968,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 
'original_name': 'Metering status net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23385,7 +22983,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23417,7 +23014,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -23429,7 +23026,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23444,7 +23041,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23476,7 +23072,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -23488,7 +23084,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23503,7 +23099,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23535,7 +23130,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -23547,7 +23142,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23562,7 +23157,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23594,7 +23188,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -23606,7 +23200,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23621,7 +23215,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23653,7 +23246,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, @@ -23665,7 +23258,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23680,7 +23273,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 
'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l1', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23712,7 +23304,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, @@ -23724,7 +23316,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23739,7 +23331,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l2', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23771,7 +23362,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, @@ -23783,7 +23374,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23798,7 +23389,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT l3', - 'icon': 'mdi:flash', 'options': list([ , , @@ -23844,7 +23434,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23859,7 +23449,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -23902,7 +23491,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23917,7 +23506,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -23960,7 +23548,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -23975,7 +23563,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24018,7 +23605,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Net consumption CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24033,7 +23620,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Net consumption CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24073,7 +23659,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24088,7 +23674,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', - 
'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24127,7 +23712,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24142,7 +23727,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24181,7 +23765,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24196,7 +23780,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24235,7 +23818,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24250,7 +23833,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24289,7 +23871,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24304,7 +23886,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24343,7 +23924,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24358,7 +23939,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24397,7 +23977,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24412,7 +23992,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24451,7 +24030,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Powerfactor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24466,7 +24045,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -24508,7 +24086,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24523,7 +24101,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24566,7 +24143,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', 
+ 'original_icon': None, 'original_name': 'Production CT current l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24581,7 +24158,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24624,7 +24200,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24639,7 +24215,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24682,7 +24257,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24697,7 +24272,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24740,7 +24314,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24755,7 +24329,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24798,7 +24371,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24813,7 +24386,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24856,7 +24428,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24871,7 +24443,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24914,7 +24485,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24929,7 +24500,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -24972,7 +24542,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -24987,7 +24557,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25030,7 +24599,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25045,7 
+24614,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25088,7 +24656,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25103,7 +24671,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l2', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25146,7 +24713,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25161,7 +24728,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT l3', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25198,7 +24764,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25213,7 +24779,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25248,7 +24813,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25263,7 +24828,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', @@ -25304,7 +24868,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25319,7 +24883,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 balanced net power consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25362,7 +24925,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Current power production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25377,7 +24940,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Envoy 1234 Current power production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25418,7 +24980,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production last seven days', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25433,7 +24995,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production last seven days', - 'icon': 'mdi:flash', 'unit_of_measurement': , }), 'context': , @@ -25475,7 +25036,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Energy production today', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25490,7 +25051,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Energy production today', - 'icon': 'mdi:flash', 'state_class': , 
'unit_of_measurement': , }), @@ -25530,7 +25090,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Frequency production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25545,7 +25105,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'frequency', 'friendly_name': 'Envoy 1234 Frequency production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25588,7 +25147,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime balanced net energy consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25603,7 +25162,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25646,7 +25204,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Lifetime energy production', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25661,7 +25219,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Envoy 1234 Lifetime energy production', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25684,7 +25241,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -25696,7 +25253,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Meter status flags active production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25710,7 +25267,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Envoy 1234 Meter status flags active production CT', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', @@ -25737,7 +25293,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', 'has_entity_name': True, 'hidden_by': None, @@ -25749,7 +25305,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Metering status production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25764,7 +25320,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'enum', 'friendly_name': 'Envoy 1234 Metering status production CT', - 'icon': 'mdi:flash', 'options': list([ , , @@ -25807,7 +25362,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'powerfactor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25822,7 +25377,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Envoy 1234 powerfactor production CT', - 'icon': 'mdi:flash', 'state_class': , }), 'context': , @@ -25864,7 +25418,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Production CT current', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25879,7 +25433,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'current', 'friendly_name': 'Envoy 1234 Production CT current', - 'icon': 'mdi:flash', 'state_class': , 
'unit_of_measurement': , }), @@ -25922,7 +25475,7 @@ }), }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Voltage production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25937,7 +25490,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'voltage', 'friendly_name': 'Envoy 1234 Voltage production CT', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -25974,7 +25526,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': None, 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -25989,7 +25541,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Inverter 1', - 'icon': 'mdi:flash', 'state_class': , 'unit_of_measurement': , }), @@ -26024,7 +25575,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:flash', + 'original_icon': None, 'original_name': 'Last reported', 'platform': 'enphase_envoy', 'previous_unique_id': None, @@ -26039,7 +25590,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'timestamp', 'friendly_name': 'Inverter 1 Last reported', - 'icon': 'mdi:flash', }), 'context': , 'entity_id': 'sensor.inverter_1_last_reported', diff --git a/tests/components/enphase_envoy/snapshots/test_switch.ambr b/tests/components/enphase_envoy/snapshots/test_switch.ambr index 46123c03cec..a022e476d5c 100644 --- a/tests/components/enphase_envoy/snapshots/test_switch.ambr +++ b/tests/components/enphase_envoy/snapshots/test_switch.ambr @@ -165,7 +165,7 @@ 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'relay_status', 'unique_id': '654321_relay_NC1_relay_status', 'unit_of_measurement': None, }) @@ -211,7 +211,7 @@ 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'relay_status', 'unique_id': '654321_relay_NC2_relay_status', 'unit_of_measurement': None, }) @@ -257,7 +257,7 @@ 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'relay_status', 'unique_id': '654321_relay_NC3_relay_status', 'unit_of_measurement': None, }) diff --git a/tests/components/enphase_envoy/test_select.py b/tests/components/enphase_envoy/test_select.py index 071dbcb2fe2..9b3a63d1e23 100644 --- a/tests/components/enphase_envoy/test_select.py +++ b/tests/components/enphase_envoy/test_select.py @@ -226,3 +226,28 @@ async def test_select_storage_modes( mock_envoy.set_storage_mode.assert_called_once_with( REVERSE_STORAGE_MODE_MAP[current_state] ) + + +@pytest.mark.parametrize( + ("mock_envoy", "use_serial"), + [ + ("envoy_metered_batt_relay", "enpower_654321"), + ("envoy_eu_batt", "envoy_1234"), + ], + indirect=["mock_envoy"], +) +async def test_select_storage_modes_if_none( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + use_serial: str, +) -> None: + """Test select platform entity storage mode when tariff storage_mode is none.""" + mock_envoy.data.tariff.storage_settings.mode = None + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + test_entity = f"{Platform.SELECT}.{use_serial}_storage_mode" + + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == "unknown" diff --git a/tests/components/ezviz/__init__.py 
b/tests/components/ezviz/__init__.py index 78bbee0b0ad..1d4911e9785 100644 --- a/tests/components/ezviz/__init__.py +++ b/tests/components/ezviz/__init__.py @@ -1,102 +1,13 @@ """Tests for the EZVIZ integration.""" -from unittest.mock import _patch, patch - -from homeassistant.components.ezviz.const import ( - ATTR_SERIAL, - ATTR_TYPE_CAMERA, - ATTR_TYPE_CLOUD, - CONF_FFMPEG_ARGUMENTS, - CONF_RFSESSION_ID, - CONF_SESSION_ID, - DEFAULT_FFMPEG_ARGUMENTS, - DEFAULT_TIMEOUT, - DOMAIN, -) -from homeassistant.const import ( - CONF_IP_ADDRESS, - CONF_PASSWORD, - CONF_TIMEOUT, - CONF_TYPE, - CONF_URL, - CONF_USERNAME, -) from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -ENTRY_CONFIG = { - CONF_SESSION_ID: "test-username", - CONF_RFSESSION_ID: "test-password", - CONF_URL: "apiieu.ezvizlife.com", - CONF_TYPE: ATTR_TYPE_CLOUD, -} -ENTRY_OPTIONS = { - CONF_FFMPEG_ARGUMENTS: DEFAULT_FFMPEG_ARGUMENTS, - CONF_TIMEOUT: DEFAULT_TIMEOUT, -} - -USER_INPUT_VALIDATE = { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_URL: "apiieu.ezvizlife.com", -} - -USER_INPUT = { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_URL: "apiieu.ezvizlife.com", - CONF_TYPE: ATTR_TYPE_CLOUD, -} - -USER_INPUT_CAMERA_VALIDATE = { - ATTR_SERIAL: "C666666", - CONF_PASSWORD: "test-password", - CONF_USERNAME: "test-username", -} - -USER_INPUT_CAMERA = { - CONF_PASSWORD: "test-password", - CONF_USERNAME: "test-username", - CONF_TYPE: ATTR_TYPE_CAMERA, -} - -DISCOVERY_INFO = { - ATTR_SERIAL: "C666666", - CONF_USERNAME: None, - CONF_PASSWORD: None, - CONF_IP_ADDRESS: "127.0.0.1", -} - -TEST = { - CONF_USERNAME: None, - CONF_PASSWORD: None, - CONF_IP_ADDRESS: "127.0.0.1", -} - -API_LOGIN_RETURN_VALIDATE = { - CONF_SESSION_ID: "fake_token", - CONF_RFSESSION_ID: "fake_rf_token", - CONF_URL: "apiieu.ezvizlife.com", - CONF_TYPE: ATTR_TYPE_CLOUD, -} - - -def patch_async_setup_entry() -> _patch: - """Patch async_setup_entry.""" - return patch( - "homeassistant.components.ezviz.async_setup_entry", - return_value=True, - ) - - -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Set up the EZVIZ integration in Home Assistant.""" - entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG, options=ENTRY_OPTIONS) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - - return entry diff --git a/tests/components/ezviz/conftest.py b/tests/components/ezviz/conftest.py index 171cfffc2fc..fab8111b171 100644 --- a/tests/components/ezviz/conftest.py +++ b/tests/components/ezviz/conftest.py @@ -1,19 +1,30 @@ """Define pytest.fixtures available for all tests.""" from collections.abc import Generator -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch -from pyezviz import EzvizClient -from pyezviz.test_cam_rtsp import TestRTSPAuth import pytest +from homeassistant.components.ezviz import ( + ATTR_TYPE_CAMERA, + ATTR_TYPE_CLOUD, + CONF_RFSESSION_ID, + CONF_SESSION_ID, + DOMAIN, +) +from homeassistant.const import CONF_PASSWORD, CONF_TYPE, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant -ezviz_login_token_return = { - "session_id": "fake_token", - "rf_session_id": "fake_rf_token", - "api_url": "apiieu.ezvizlife.com", -} +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting 
up a config entry.""" + with patch( + "homeassistant.components.ezviz.async_setup_entry", return_value=True + ) as setup_entry_mock: + yield setup_entry_mock @pytest.fixture(autouse=True) @@ -23,40 +34,67 @@ def mock_ffmpeg(hass: HomeAssistant) -> None: @pytest.fixture -def ezviz_test_rtsp_config_flow() -> Generator[MagicMock]: +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + title="test-username", + data={ + CONF_SESSION_ID: "test-username", + CONF_RFSESSION_ID: "test-password", + CONF_URL: "apiieu.ezvizlife.com", + CONF_TYPE: ATTR_TYPE_CLOUD, + }, + ) + + +@pytest.fixture +def mock_camera_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="C666666", + title="Camera 1", + data={ + CONF_TYPE: ATTR_TYPE_CAMERA, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + +@pytest.fixture +def mock_ezviz_client() -> Generator[AsyncMock]: + """Mock the EzvizAPI for easier testing.""" + with ( + patch( + "homeassistant.components.ezviz.EzvizClient", autospec=True + ) as mock_ezviz, + patch("homeassistant.components.ezviz.config_flow.EzvizClient", new=mock_ezviz), + ): + instance = mock_ezviz.return_value + + instance.login.return_value = { + "session_id": "fake_token", + "rf_session_id": "fake_rf_token", + "api_url": "apiieu.ezvizlife.com", + } + instance.get_detection_sensibility.return_value = True + + yield instance + + +@pytest.fixture +def mock_test_rtsp_auth() -> Generator[MagicMock]: """Mock the EzvizApi for easier testing.""" with ( - patch.object(TestRTSPAuth, "main", return_value=True), patch( "homeassistant.components.ezviz.config_flow.TestRTSPAuth" ) as mock_ezviz_test_rtsp, ): - instance = mock_ezviz_test_rtsp.return_value = TestRTSPAuth( - "test-ip", - "test-username", - "test-password", - ) + instance = mock_ezviz_test_rtsp.return_value - instance.main = MagicMock(return_value=True) + instance.main.return_value = True - yield mock_ezviz_test_rtsp - - -@pytest.fixture -def ezviz_config_flow() -> Generator[MagicMock]: - """Mock the EzvizAPI for easier config flow testing.""" - with ( - patch.object(EzvizClient, "login", return_value=True), - patch("homeassistant.components.ezviz.config_flow.EzvizClient") as mock_ezviz, - ): - instance = mock_ezviz.return_value = EzvizClient( - "test-username", - "test-password", - "local.host", - "1", - ) - - instance.login = MagicMock(return_value=ezviz_login_token_return) - instance.get_detection_sensibility = MagicMock(return_value=True) - - yield mock_ezviz + yield instance diff --git a/tests/components/ezviz/test_config_flow.py b/tests/components/ezviz/test_config_flow.py index 63499996c89..ff538b31edb 100644 --- a/tests/components/ezviz/test_config_flow.py +++ b/tests/components/ezviz/test_config_flow.py @@ -1,11 +1,9 @@ """Test the EZVIZ config flow.""" -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock from pyezviz.exceptions import ( - AuthTestResultFailed, EzvizAuthVerificationCode, - HTTPError, InvalidHost, InvalidURL, PyEzvizError, @@ -15,7 +13,10 @@ import pytest from homeassistant.components.ezviz.const import ( ATTR_SERIAL, ATTR_TYPE_CAMERA, + ATTR_TYPE_CLOUD, CONF_FFMPEG_ARGUMENTS, + CONF_RFSESSION_ID, + CONF_SESSION_ID, DEFAULT_FFMPEG_ARGUMENTS, DEFAULT_TIMEOUT, DOMAIN, @@ -33,20 +34,14 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType -from . import ( - API_LOGIN_RETURN_VALIDATE, - DISCOVERY_INFO, - USER_INPUT_VALIDATE, - init_integration, - patch_async_setup_entry, -) +from . import setup_integration -from tests.common import MockConfigEntry, start_reauth_flow +from tests.common import MockConfigEntry -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_user_form(hass: HomeAssistant) -> None: - """Test the user initiated form.""" +@pytest.mark.usefixtures("mock_ezviz_client") +async def test_full_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test the full flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -55,28 +50,32 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_URL: "apiieu.ezvizlife.com", + }, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" - assert result["data"] == {**API_LOGIN_RETURN_VALIDATE} + assert result["data"] == { + CONF_SESSION_ID: "fake_token", + CONF_RFSESSION_ID: "fake_rf_token", + CONF_URL: "apiieu.ezvizlife.com", + CONF_TYPE: ATTR_TYPE_CLOUD, + } + assert result["result"].unique_id == "test-username" assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured_account" - -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_user_custom_url(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_ezviz_client") +async def test_user_custom_url( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: """Test custom url step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -95,45 +94,30 @@ async def test_user_custom_url(hass: HomeAssistant) -> None: assert result["step_id"] == "user_custom_url" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "test-user"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == API_LOGIN_RETURN_VALIDATE - - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_async_step_reauth(hass: HomeAssistant) -> None: - """Test the reauth step.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "test-user"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert 
result["data"] == {**API_LOGIN_RETURN_VALIDATE} + assert result["data"] == { + CONF_SESSION_ID: "fake_token", + CONF_RFSESSION_ID: "fake_rf_token", + CONF_URL: "apiieu.ezvizlife.com", + CONF_TYPE: ATTR_TYPE_CLOUD, + } assert len(mock_setup_entry.mock_calls) == 1 - new_entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await start_reauth_flow(hass, new_entry) + +@pytest.mark.usefixtures("mock_ezviz_client", "mock_setup_entry") +async def test_async_step_reauth( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the reauth step.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -145,19 +129,26 @@ async def test_async_step_reauth(hass: HomeAssistant) -> None: CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" +@pytest.mark.usefixtures("mock_ezviz_client") async def test_step_discovery_abort_if_cloud_account_missing( - hass: HomeAssistant, + hass: HomeAssistant, mock_test_rtsp_auth: AsyncMock ) -> None: """Test discovery and confirm step, abort if cloud account was removed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_INTEGRATION_DISCOVERY}, data=DISCOVERY_INFO + DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data={ + ATTR_SERIAL: "C666666", + CONF_USERNAME: None, + CONF_PASSWORD: None, + CONF_IP_ADDRESS: "127.0.0.1", + }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -170,45 +161,52 @@ async def test_step_discovery_abort_if_cloud_account_missing( CONF_PASSWORD: "test-pass", }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "ezviz_cloud_account_missing" -async def test_step_reauth_abort_if_cloud_account_missing(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_ezviz_client", "mock_test_rtsp_auth") +async def test_step_reauth_abort_if_cloud_account_missing( + hass: HomeAssistant, mock_camera_config_entry: MockConfigEntry +) -> None: """Test reauth and confirm step, abort if cloud account was removed.""" - entry = MockConfigEntry(domain=DOMAIN, data=USER_INPUT_VALIDATE) - entry.add_to_hass(hass) + mock_camera_config_entry.add_to_hass(hass) - result = await entry.start_reauth_flow(hass) + result = await mock_camera_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "ezviz_cloud_account_missing" -@pytest.mark.usefixtures("ezviz_config_flow", "ezviz_test_rtsp_config_flow") -async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_ezviz_client", "mock_test_rtsp_auth", "mock_setup_entry") +async def test_async_step_integration_discovery( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: """Test discovery and confirm step.""" - with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): - await init_integration(hass) + mock_config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_INTEGRATION_DISCOVERY}, data=DISCOVERY_INFO + DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data={ + ATTR_SERIAL: "C666666", + CONF_USERNAME: None, + CONF_PASSWORD: None, + CONF_IP_ADDRESS: "127.0.0.1", + }, 
) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-user", + CONF_PASSWORD: "test-pass", + }, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { @@ -216,40 +214,103 @@ async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: CONF_TYPE: ATTR_TYPE_CAMERA, CONF_USERNAME: "test-user", } - - assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "C666666" -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: """Test updating options.""" - with patch_async_setup_entry() as mock_setup_entry: - entry = await init_integration(hass) + await setup_integration(hass, mock_config_entry) - assert entry.options[CONF_FFMPEG_ARGUMENTS] == DEFAULT_FFMPEG_ARGUMENTS - assert entry.options[CONF_TIMEOUT] == DEFAULT_TIMEOUT + assert mock_config_entry.options[CONF_FFMPEG_ARGUMENTS] == DEFAULT_FFMPEG_ARGUMENTS + assert mock_config_entry.options[CONF_TIMEOUT] == DEFAULT_TIMEOUT - result = await hass.config_entries.options.async_init(entry.entry_id) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - assert result["errors"] is None + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] is None - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_FFMPEG_ARGUMENTS: "/H.264", CONF_TIMEOUT: 25}, - ) - await hass.async_block_till_done() + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_FFMPEG_ARGUMENTS: "/H.264", CONF_TIMEOUT: 25}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"][CONF_FFMPEG_ARGUMENTS] == "/H.264" assert result["data"][CONF_TIMEOUT] == 25 + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (InvalidURL, "invalid_host"), + (InvalidHost, "cannot_connect"), + (EzvizAuthVerificationCode, "mfa_required"), + (PyEzvizError, "invalid_auth"), + ], +) +async def test_user_flow_errors( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_ezviz_client.login.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_URL: "apiieu.ezvizlife.com", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": error} + + mock_ezviz_client.login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: 
"test-password", + CONF_URL: "apiieu.ezvizlife.com", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == { + CONF_SESSION_ID: "fake_token", + CONF_RFSESSION_ID: "fake_rf_token", + CONF_URL: "apiieu.ezvizlife.com", + CONF_TYPE: ATTR_TYPE_CLOUD, + } + assert result["result"].unique_id == "test-username" + assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_form_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_flow_unknown_exception( + hass: HomeAssistant, mock_ezviz_client: AsyncMock ) -> None: - """Test we handle exception on user form.""" + """Test the full flow.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) @@ -257,223 +318,53 @@ async def test_user_form_exception( assert result["step_id"] == "user" assert result["errors"] == {} - ezviz_config_flow.side_effect = PyEzvizError + mock_ezviz_client.login.side_effect = Exception result = await hass.config_entries.flow.async_configure( result["flow_id"], - USER_INPUT_VALIDATE, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = InvalidURL - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_host"} - - ezviz_config_flow.side_effect = EzvizAuthVerificationCode - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "mfa_required"} - - ezviz_config_flow.side_effect = HTTPError - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = Exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_URL: "apiieu.ezvizlife.com", + }, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" -async def test_discover_exception_step1( +@pytest.mark.parametrize( + ("exception", "error"), + [ + (InvalidURL, "invalid_host"), + (InvalidHost, "cannot_connect"), + (EzvizAuthVerificationCode, "mfa_required"), + (PyEzvizError, "invalid_auth"), + ], +) +async def test_user_custom_url_errors( hass: HomeAssistant, - ezviz_config_flow: MagicMock, + mock_ezviz_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, ) -> None: - """Test we handle unexpected exception on discovery.""" - with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): - await init_integration(hass) + """Test the full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_INTEGRATION_DISCOVERY}, - data={ATTR_SERIAL: "C66666", CONF_IP_ADDRESS: "test-ip"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {} - - # Test Step 1 - 
ezviz_config_flow.side_effect = PyEzvizError - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = InvalidURL - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "invalid_host"} - - ezviz_config_flow.side_effect = HTTPError - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = EzvizAuthVerificationCode - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "mfa_required"} - - ezviz_config_flow.side_effect = Exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unknown" - - -@pytest.mark.usefixtures("ezviz_config_flow") -async def test_discover_exception_step3( - hass: HomeAssistant, ezviz_test_rtsp_config_flow: MagicMock -) -> None: - """Test we handle unexpected exception on discovery.""" - with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): - await init_integration(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_INTEGRATION_DISCOVERY}, - data={ATTR_SERIAL: "C66666", CONF_IP_ADDRESS: "test-ip"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {} - - # Test Step 3 - ezviz_test_rtsp_config_flow.side_effect = AuthTestResultFailed - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_test_rtsp_config_flow.side_effect = InvalidHost - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm" - assert result["errors"] == {"base": "invalid_host"} - - ezviz_test_rtsp_config_flow.side_effect = Exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unknown" - - -async def test_user_custom_url_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock -) -> None: - """Test we handle unexpected exception.""" - ezviz_config_flow.side_effect = PyEzvizError() 
result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_ezviz_client.login.side_effect = exception result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_USERNAME: "test-user", - CONF_PASSWORD: "test-pass", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", CONF_URL: CONF_CUSTOMIZE, }, ) @@ -489,56 +380,33 @@ async def test_user_custom_url_exception( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user_custom_url" - assert result["errors"] == {"base": "invalid_auth"} + assert result["errors"] == {"base": error} - ezviz_config_flow.side_effect = InvalidURL + mock_ezviz_client.login.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_URL: "test-user"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user_custom_url" - assert result["errors"] == {"base": "invalid_host"} + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == { + CONF_SESSION_ID: "fake_token", + CONF_RFSESSION_ID: "fake_rf_token", + CONF_URL: "apiieu.ezvizlife.com", + CONF_TYPE: ATTR_TYPE_CLOUD, + } + assert result["result"].unique_id == "test-username" - ezviz_config_flow.side_effect = HTTPError - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "test-user"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user_custom_url" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = EzvizAuthVerificationCode - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "test-user"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user_custom_url" - assert result["errors"] == {"base": "mfa_required"} - - ezviz_config_flow.side_effect = Exception - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_URL: "test-user"}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unknown" + assert len(mock_setup_entry.mock_calls) == 1 -async def test_async_step_reauth_exception( - hass: HomeAssistant, ezviz_config_flow: MagicMock +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_custom_url_unknown_exception( + hass: HomeAssistant, mock_ezviz_client: AsyncMock ) -> None: - """Test the reauth step exceptions.""" + """Test the full flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -547,26 +415,210 @@ async def test_async_step_reauth_exception( assert result["step_id"] == "user" assert result["errors"] == {} - with patch_async_setup_entry() as mock_setup_entry: - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - USER_INPUT_VALIDATE, - ) - await hass.async_block_till_done() + mock_ezviz_client.login.side_effect = Exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_URL: CONF_CUSTOMIZE, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_custom_url" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
{CONF_URL: "test-user"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +async def test_already_configured( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the flow when the account is already configured.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured_account" + + +async def test_async_step_integration_discovery_duplicate( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_test_rtsp_auth: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_camera_config_entry: MockConfigEntry, +) -> None: + """Test discovery and confirm step.""" + mock_config_entry.add_to_hass(hass) + mock_camera_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data={ + ATTR_SERIAL: "C666666", + CONF_USERNAME: None, + CONF_PASSWORD: None, + CONF_IP_ADDRESS: "127.0.0.1", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception", "error"), + [ + (InvalidURL, "invalid_host"), + (InvalidHost, "invalid_host"), + (EzvizAuthVerificationCode, "mfa_required"), + (PyEzvizError, "invalid_auth"), + ], +) +async def test_camera_errors( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_test_rtsp_auth: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test the camera flow with errors.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data={ + ATTR_SERIAL: "C666666", + CONF_USERNAME: None, + CONF_PASSWORD: None, + CONF_IP_ADDRESS: "127.0.0.1", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["errors"] == {} + + mock_ezviz_client.login.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["errors"] == {"base": error} + + mock_ezviz_client.login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert result["data"] == {**API_LOGIN_RETURN_VALIDATE} + assert result["title"] == "C666666" + assert result["data"] == { + CONF_TYPE: ATTR_TYPE_CAMERA, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + } + assert result["result"].unique_id == "C666666" - assert len(mock_setup_entry.mock_calls) == 1 - new_entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await start_reauth_flow(hass, new_entry) +@pytest.mark.usefixtures("mock_setup_entry") +async def test_camera_unknown_error( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_test_rtsp_auth: AsyncMock, + mock_config_entry: 
MockConfigEntry, +) -> None: + """Test the camera flow with errors.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data={ + ATTR_SERIAL: "C666666", + CONF_USERNAME: None, + CONF_PASSWORD: None, + CONF_IP_ADDRESS: "127.0.0.1", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["errors"] == {} + + mock_ezviz_client.login.side_effect = Exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception", "error"), + [ + (InvalidURL, "invalid_host"), + (InvalidHost, "invalid_host"), + (EzvizAuthVerificationCode, "mfa_required"), + (PyEzvizError, "invalid_auth"), + ], +) +async def test_reauth_errors( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test the reauth step.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} - ezviz_config_flow.side_effect = InvalidURL() + mock_ezviz_client.login.side_effect = exception + result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -574,13 +626,12 @@ async def test_async_step_reauth_exception( CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "invalid_host"} + assert result["errors"] == {"base": error} + + mock_ezviz_client.login.side_effect = None - ezviz_config_flow.side_effect = InvalidHost() result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -588,49 +639,33 @@ async def test_async_step_reauth_exception( CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "invalid_host"} - - ezviz_config_flow.side_effect = EzvizAuthVerificationCode() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "mfa_required"} - - ezviz_config_flow.side_effect = PyEzvizError() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "invalid_auth"} - - ezviz_config_flow.side_effect = Exception() - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert 
result["reason"] == "reauth_successful" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_reauth_unknown_exception( + hass: HomeAssistant, + mock_ezviz_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the reauth step.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + + mock_ezviz_client.login.side_effect = Exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 39435f154c4..8cbbdbb49d4 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -319,6 +319,20 @@ async def test_set_target_temp(hass: HomeAssistant) -> None: assert state.attributes.get("temperature") == 30.0 +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp_change_preset(hass: HomeAssistant) -> None: + """Test the setting of the target temperature. + + Verify that preset is changed. + """ + await common.async_set_temperature(hass, 30) + state = hass.states.get(ENTITY) + assert state.attributes.get("preset_mode") == PRESET_NONE + await common.async_set_temperature(hass, 20) + state = hass.states.get(ENTITY) + assert state.attributes.get("preset_mode") == PRESET_COMFORT + + @pytest.mark.parametrize( ("preset", "temp"), [ diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 3a69455772e..122467c6b02 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -87,11 +87,12 @@ async def controller_fixture( mock_heos.load_players = AsyncMock(return_value=change_data) mock_heos._signed_in_username = "user@user.com" mock_heos.get_groups = AsyncMock(return_value=group) - mock_heos.create_group = AsyncMock(return_value=None) + mock_heos._groups = group + mock_heos.set_group = AsyncMock(return_value=None) new_mock = Mock(return_value=mock_heos) mock_heos.new_mock = new_mock with ( - patch("homeassistant.components.heos.Heos", new=new_mock), + patch("homeassistant.components.heos.coordinator.Heos", new=new_mock), patch("homeassistant.components.heos.config_flow.Heos", new=new_mock), ): yield mock_heos @@ -104,6 +105,7 @@ def players_fixture(quick_selects: dict[int, str]) -> dict[int, HeosPlayer]: for i in (1, 2): player = HeosPlayer( player_id=i, + group_id=999, name="Test Player" if i == 1 else f"Test Player {i}", model="HEOS Drive HS2" if i == 1 else "Speaker", serial="123456", @@ -139,7 +141,7 @@ def players_fixture(quick_selects: dict[int, str]) -> dict[int, HeosPlayer]: player.mute = AsyncMock() player.pause = AsyncMock() player.play = AsyncMock() - player.play_input_source = AsyncMock() + player.play_media = AsyncMock() player.play_next = AsyncMock() player.play_previous = AsyncMock() player.play_preset_station = AsyncMock() @@ -193,17 +195,28 @@ def favorites_fixture() -> dict[int, MediaItem]: @pytest.fixture(name="input_sources") def input_sources_fixture() -> list[MediaItem]: """Create a set of input sources for testing.""" - source = MediaItem( - source_id=1, - name="HEOS Drive - Line In 1", - media_id=const.INPUT_AUX_IN_1, - 
type=MediaType.STATION, - playable=True, - browsable=False, - image_url="", - heos=None, - ) - return [source] + return [ + MediaItem( + source_id=const.MUSIC_SOURCE_AUX_INPUT, + name="HEOS Drive - Line In 1", + media_id=const.INPUT_AUX_IN_1, + type=MediaType.STATION, + playable=True, + browsable=False, + image_url="", + heos=None, + ), + MediaItem( + source_id=const.MUSIC_SOURCE_AUX_INPUT, + name="Speaker - Line In 1", + media_id=const.INPUT_AUX_IN_1, + type=MediaType.STATION, + playable=True, + browsable=False, + image_url="", + heos=None, + ), + ] @pytest.fixture(name="discovery_data") diff --git a/tests/components/heos/snapshots/test_media_player.ambr b/tests/components/heos/snapshots/test_media_player.ambr index 7ade53c92ee..7bfdac232cb 100644 --- a/tests/components/heos/snapshots/test_media_player.ambr +++ b/tests/components/heos/snapshots/test_media_player.ambr @@ -19,13 +19,15 @@ 'media_station': 'Station Name', 'media_title': 'Song', 'media_type': 'Station', + 'repeat': , 'shuffle': False, 'source_list': list([ "Today's Hits Radio", 'Classical MPR (Classical Music)', 'HEOS Drive - Line In 1', + 'Speaker - Line In 1', ]), - 'supported_features': , + 'supported_features': , 'volume_level': 0.25, }), 'entity_id': 'media_player.test_player', diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 2f01e70e2d1..39ede354496 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -4,7 +4,7 @@ from pyheos import CommandAuthenticationError, CommandFailedError, Heos, HeosErr import pytest from homeassistant.components.heos.const import DOMAIN -from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -358,6 +358,7 @@ async def test_reauth_signs_in_aborts( config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) result = await config_entry.start_reauth_flow(hass) + assert config_entry.state is ConfigEntryState.LOADED assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -396,6 +397,7 @@ async def test_reauth_signs_out( config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) result = await config_entry.start_reauth_flow(hass) + assert config_entry.state is ConfigEntryState.LOADED assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -447,6 +449,7 @@ async def test_reauth_flow_missing_one_param_recovers( # Start the options flow. Entry has not current options. 
result = await config_entry.start_reauth_flow(hass) + assert config_entry.state is ConfigEntryState.LOADED assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} assert result["type"] is FlowResultType.FORM diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index cff73ad0394..4c5eee67e2c 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -2,15 +2,7 @@ from typing import cast -from pyheos import ( - CommandFailedError, - Heos, - HeosError, - HeosOptions, - SignalHeosEvent, - SignalType, - const, -) +from pyheos import Heos, HeosError, HeosOptions, SignalHeosEvent, SignalType import pytest from homeassistant.components.heos.const import DOMAIN @@ -30,7 +22,7 @@ async def test_async_setup_entry_loads_platforms( """Test load connects to heos, retrieves players, and loads platforms.""" config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) - assert config_entry.state == ConfigEntryState.LOADED + assert config_entry.state is ConfigEntryState.LOADED assert hass.states.get("media_player.test_player") is not None assert controller.connect.call_count == 1 assert controller.get_players.call_count == 1 @@ -116,24 +108,41 @@ async def test_async_setup_entry_connect_failure( config_entry.add_to_hass(hass) controller.connect.side_effect = HeosError() assert not await hass.config_entries.async_setup(config_entry.entry_id) - assert config_entry.state == ConfigEntryState.SETUP_RETRY assert controller.connect.call_count == 1 assert controller.disconnect.call_count == 1 - controller.connect.reset_mock() - controller.disconnect.reset_mock() + assert config_entry.state is ConfigEntryState.SETUP_RETRY async def test_async_setup_entry_player_failure( hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos ) -> None: - """Failure to retrieve players/sources raises ConfigEntryNotReady.""" + """Failure to retrieve players raises ConfigEntryNotReady.""" config_entry.add_to_hass(hass) controller.get_players.side_effect = HeosError() assert not await hass.config_entries.async_setup(config_entry.entry_id) assert controller.connect.call_count == 1 assert controller.disconnect.call_count == 1 - controller.connect.reset_mock() - controller.disconnect.reset_mock() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_async_setup_entry_favorites_failure( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos +) -> None: + """Failure to retrieve favorites loads.""" + config_entry.add_to_hass(hass) + controller.get_favorites.side_effect = HeosError() + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_async_setup_entry_inputs_failure( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos +) -> None: + """Failure to retrieve inputs loads.""" + config_entry.add_to_hass(hass) + controller.get_input_sources.side_effect = HeosError() + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.LOADED async def test_unload_entry( @@ -146,27 +155,6 @@ async def test_unload_entry( assert controller.disconnect.call_count == 1 -async def test_update_sources_retry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - controller: Heos, -) -> None: - """Test update sources retries on failures to max attempts.""" - config_entry.add_to_hass(hass) - assert await 
hass.config_entries.async_setup(config_entry.entry_id) - controller.get_favorites.reset_mock() - controller.get_input_sources.reset_mock() - source_manager = config_entry.runtime_data.source_manager - source_manager.retry_delay = 0 - source_manager.max_retry_attempts = 1 - controller.get_favorites.side_effect = CommandFailedError("Test", "test", 0) - await controller.dispatcher.wait_send( - SignalType.CONTROLLER_EVENT, const.EVENT_SOURCES_CHANGED, {} - ) - await hass.async_block_till_done() - assert controller.get_favorites.call_count == 2 - - async def test_device_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 805e593935c..2d9f69d764d 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -1,16 +1,20 @@ """Tests for the Heos Media Player platform.""" +from datetime import timedelta import re from typing import Any +from freezegun.api import FrozenDateTimeFactory from pyheos import ( AddCriteriaType, CommandFailedError, Heos, HeosError, MediaItem, + MediaType as HeosMediaType, PlayerUpdateResult, PlayState, + RepeatType, SignalHeosEvent, SignalType, const, @@ -30,6 +34,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_REPEAT, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, @@ -40,6 +45,7 @@ from homeassistant.components.media_player import ( SERVICE_SELECT_SOURCE, SERVICE_UNJOIN, MediaType, + RepeatMode, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -48,6 +54,7 @@ from homeassistant.const import ( SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, SERVICE_SHUFFLE_SET, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, @@ -59,7 +66,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_state_attributes( @@ -168,22 +175,55 @@ async def test_updates_from_connection_event( assert "Unable to refresh players" in caplog.text +async def test_updates_from_connection_event_new_player_ids( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + controller: Heos, + change_data_mapped_ids: PlayerUpdateResult, +) -> None: + """Test player ids changed after reconnection updates ids.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Assert current IDs + assert device_registry.async_get_device(identifiers={(DOMAIN, "1")}) + assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "1") + + # Send event which will result in updated IDs. 
+ controller.load_players.return_value = change_data_mapped_ids + await controller.dispatcher.wait_send( + SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED + ) + await hass.async_block_till_done() + + # Assert updated IDs and previous don't exist + assert not device_registry.async_get_device(identifiers={(DOMAIN, "1")}) + assert device_registry.async_get_device(identifiers={(DOMAIN, "101")}) + assert not entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "1") + assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "101") + + async def test_updates_from_sources_updated( hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos, - input_sources: list[MediaItem], + freezer: FrozenDateTimeFactory, ) -> None: """Tests player updates from changes in sources list.""" config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) player = controller.players[1] - input_sources.clear() + controller.get_input_sources.return_value = [] await player.heos.dispatcher.wait_send( SignalType.CONTROLLER_EVENT, const.EVENT_SOURCES_CHANGED, {} ) + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() + state = hass.states.get("media_player.test_player") assert state.attributes[ATTR_INPUT_SOURCE_LIST] == [ "Today's Hits Radio", @@ -254,6 +294,7 @@ async def test_updates_from_user_changed( hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos, + freezer: FrozenDateTimeFactory, ) -> None: """Tests player updates from changes in user.""" config_entry.add_to_hass(hass) @@ -264,10 +305,50 @@ async def test_updates_from_user_changed( await player.heos.dispatcher.wait_send( SignalType.CONTROLLER_EVENT, const.EVENT_USER_CHANGED, None ) + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get("media_player.test_player") - assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["HEOS Drive - Line In 1"] + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == [ + "HEOS Drive - Line In 1", + "Speaker - Line In 1", + ] + + +async def test_updates_from_groups_changed( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos +) -> None: + """Test player updates from changes to groups.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + # Assert current state + assert hass.states.get("media_player.test_player").attributes[ + ATTR_GROUP_MEMBERS + ] == ["media_player.test_player", "media_player.test_player_2"] + assert hass.states.get("media_player.test_player_2").attributes[ + ATTR_GROUP_MEMBERS + ] == ["media_player.test_player", "media_player.test_player_2"] + + # Clear group information + controller._groups = {} + for player in controller.players.values(): + player.group_id = None + await controller.dispatcher.wait_send( + SignalType.CONTROLLER_EVENT, const.EVENT_GROUPS_CHANGED, None + ) + await hass.async_block_till_done() + + # Assert groups changed + assert ( + hass.states.get("media_player.test_player").attributes[ATTR_GROUP_MEMBERS] + is None + ) + assert ( + hass.states.get("media_player.test_player_2").attributes[ATTR_GROUP_MEMBERS] + is None + ) async def test_clear_playlist( @@ -563,6 +644,46 @@ async def test_shuffle_set_error( player.set_play_mode.assert_called_once_with(player.repeat, True) +async def test_repeat_set( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos +) -> None: + """Test the 
repeat set service.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + player = controller.players[1] + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_REPEAT: RepeatMode.ONE}, + blocking=True, + ) + player.set_play_mode.assert_called_once_with(RepeatType.ON_ONE, player.shuffle) + + +async def test_repeat_set_error( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos +) -> None: + """Test the repeat set service raises error.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + player = controller.players[1] + player.set_play_mode.side_effect = CommandFailedError(None, "Failure", 1) + with pytest.raises( + HomeAssistantError, + match=re.escape("Unable to set repeat: Failure (1)"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_REPEAT: RepeatMode.ALL, + }, + blocking=True, + ) + player.set_play_mode.assert_called_once_with(RepeatType.ON_ALL, player.shuffle) + + async def test_volume_set( hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos ) -> None: @@ -620,6 +741,7 @@ async def test_select_favorite( ) player.play_preset_station.assert_called_once_with(1) # Test state is matched by station name + player.now_playing_media.type = HeosMediaType.STATION player.now_playing_media.station = favorite.name await player.heos.dispatcher.wait_send( SignalType.PLAYER_EVENT, player.player_id, const.EVENT_PLAYER_STATE_CHANGED @@ -649,6 +771,7 @@ async def test_select_radio_favorite( ) player.play_preset_station.assert_called_once_with(2) # Test state is matched by album id + player.now_playing_media.type = HeosMediaType.STATION player.now_playing_media.station = "Classical" player.now_playing_media.album_id = favorite.media_id await player.heos.dispatcher.wait_send( @@ -688,37 +811,51 @@ async def test_select_radio_favorite_command_error( player.play_preset_station.assert_called_once_with(2) +@pytest.mark.parametrize( + ("source_name", "station"), + [ + ("HEOS Drive - Line In 1", "Line In 1"), + ("Speaker - Line In 1", "Speaker - Line In 1"), + ], +) async def test_select_input_source( hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos, input_sources: list[MediaItem], + source_name: str, + station: str, ) -> None: """Tests selecting input source and state.""" config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) player = controller.players[1] - # Test proper service called - input_source = input_sources[0] + await hass.services.async_call( MEDIA_PLAYER_DOMAIN, SERVICE_SELECT_SOURCE, { ATTR_ENTITY_ID: "media_player.test_player", - ATTR_INPUT_SOURCE: input_source.name, + ATTR_INPUT_SOURCE: source_name, }, blocking=True, ) - player.play_input_source.assert_called_once_with(input_source.media_id) - # Test state is matched by media id + input_sources = next( + input_sources + for input_sources in input_sources + if input_sources.name == source_name + ) + player.play_media.assert_called_once_with(input_sources) + # Update the now_playing_media to reflect play_media player.now_playing_media.source_id = const.MUSIC_SOURCE_AUX_INPUT + player.now_playing_media.station = station player.now_playing_media.media_id = const.INPUT_AUX_IN_1 await player.heos.dispatcher.wait_send( SignalType.PLAYER_EVENT, player.player_id, 
const.EVENT_PLAYER_STATE_CHANGED ) await hass.async_block_till_done() state = hass.states.get("media_player.test_player") - assert state.attributes[ATTR_INPUT_SOURCE] == input_source.name + assert state.attributes[ATTR_INPUT_SOURCE] == source_name async def test_select_input_unknown_raises( @@ -750,7 +887,7 @@ async def test_select_input_command_error( await hass.config_entries.async_setup(config_entry.entry_id) player = controller.players[1] input_source = input_sources[0] - player.play_input_source.side_effect = CommandFailedError(None, "Failure", 1) + player.play_media.side_effect = CommandFailedError(None, "Failure", 1) with pytest.raises( HomeAssistantError, match=re.escape("Unable to select source: Failure (1)"), @@ -764,7 +901,7 @@ async def test_select_input_command_error( }, blocking=True, ) - player.play_input_source.assert_called_once_with(input_source.media_id) + player.play_media.assert_called_once_with(input_source) async def test_unload_config_entry( @@ -1017,8 +1154,20 @@ async def test_play_media_invalid_type( ) +@pytest.mark.parametrize( + ("members", "expected"), + [ + (["media_player.test_player_2"], [1, 2]), + (["media_player.test_player_2", "media_player.test_player"], [1, 2]), + (["media_player.test_player"], [1]), + ], +) async def test_media_player_join_group( - hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: Heos, + members: list[str], + expected: tuple[int, list[int]], ) -> None: """Test grouping of media players through the join service.""" config_entry.add_to_hass(hass) @@ -1028,16 +1177,11 @@ async def test_media_player_join_group( SERVICE_JOIN, { ATTR_ENTITY_ID: "media_player.test_player", - ATTR_GROUP_MEMBERS: ["media_player.test_player_2"], + ATTR_GROUP_MEMBERS: members, }, blocking=True, ) - controller.create_group.assert_called_once_with( - 1, - [ - 2, - ], - ) + controller.set_group.assert_called_once_with(expected) async def test_media_player_join_group_error( @@ -1046,7 +1190,7 @@ async def test_media_player_join_group_error( """Test grouping of media players through the join service raises error.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) - controller.create_group.side_effect = HeosError("error") + controller.set_group.side_effect = HeosError("error") with pytest.raises( HomeAssistantError, match=re.escape("Unable to join players: error"), @@ -1088,15 +1232,24 @@ async def test_media_player_group_members_error( ) -> None: """Test error in HEOS API.""" controller.get_groups.side_effect = HeosError("error") + controller._groups = {} config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) - assert "Unable to get HEOS group info" in caplog.text + assert "Unable to retrieve groups" in caplog.text player_entity = hass.states.get("media_player.test_player") - assert player_entity.attributes[ATTR_GROUP_MEMBERS] == [] + assert player_entity.attributes[ATTR_GROUP_MEMBERS] is None +@pytest.mark.parametrize( + ("entity_id", "expected_args"), + [("media_player.test_player", [1]), ("media_player.test_player_2", [1])], +) async def test_media_player_unjoin_group( - hass: HomeAssistant, config_entry: MockConfigEntry, controller: Heos + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: Heos, + entity_id: str, + expected_args: list[int], ) -> None: """Test ungrouping of media players through the unjoin service.""" config_entry.add_to_hass(hass) @@ -1105,11 +1258,11 @@ 
async def test_media_player_unjoin_group( MEDIA_PLAYER_DOMAIN, SERVICE_UNJOIN, { - ATTR_ENTITY_ID: "media_player.test_player", + ATTR_ENTITY_ID: entity_id, }, blocking=True, ) - controller.create_group.assert_called_once_with(1, []) + controller.set_group.assert_called_once_with(expected_args) async def test_media_player_unjoin_group_error( @@ -1118,7 +1271,7 @@ async def test_media_player_unjoin_group_error( """Test ungrouping of media players through the unjoin service error raises.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) - controller.create_group.side_effect = HeosError("error") + controller.set_group.side_effect = HeosError("error") with pytest.raises( HomeAssistantError, match=re.escape("Unable to unjoin player: error"), @@ -1147,10 +1300,7 @@ async def test_media_player_group_fails_when_entity_removed( entity_registry.async_remove("media_player.test_player_2") # Attempt to group - with pytest.raises( - HomeAssistantError, - match="The group member media_player.test_player_2 could not be resolved to a HEOS player.", - ): + with pytest.raises(ServiceValidationError, match="was not found"): await hass.services.async_call( MEDIA_PLAYER_DOMAIN, SERVICE_JOIN, @@ -1160,4 +1310,35 @@ async def test_media_player_group_fails_when_entity_removed( }, blocking=True, ) - controller.create_group.assert_not_called() + controller.set_group.assert_not_called() + + +async def test_media_player_group_fails_wrong_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: Heos, + entity_registry: er.EntityRegistry, +) -> None: + """Test grouping fails when trying to join from the wrong integration.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Create an entity in another integration + entry = entity_registry.async_get_or_create( + "media_player", "Other", "test_player_2" + ) + + # Attempt to group + with pytest.raises( + ServiceValidationError, match="is not a HEOS media player entity" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_GROUP_MEMBERS: [entry.entity_id], + }, + blocking=True, + ) + controller.set_group.assert_not_called() diff --git a/tests/components/history/conftest.py b/tests/components/history/conftest.py index dd10fccccdc..8269d3319cb 100644 --- a/tests/components/history/conftest.py +++ b/tests/components/history/conftest.py @@ -8,12 +8,12 @@ from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 8900998a7b8..40f62baa5e7 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -156,6 +156,86 @@ async def test_if_fires_using_at_input_datetime( ) +@pytest.mark.parametrize(("hour"), [0, 5, 23]) +@pytest.mark.parametrize( + ("has_date", "has_time"), [(True, True), (False, True), (True, False)] +) +@pytest.mark.parametrize( + ("offset", "delta"), + [ + 
("00:00:10", timedelta(seconds=10)), + ("-00:00:10", timedelta(seconds=-10)), + ({"minutes": 5}, timedelta(minutes=5)), + ("01:00:10", timedelta(hours=1, seconds=10)), + ], +) +async def test_if_fires_using_at_input_datetime_with_offset( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], + has_date: bool, + has_time: bool, + offset: str, + delta: timedelta, + hour: int, +) -> None: + """Test for firing at input_datetime.""" + await async_setup_component( + hass, + "input_datetime", + {"input_datetime": {"trigger": {"has_date": has_date, "has_time": has_time}}}, + ) + now = dt_util.now() + + start_dt = now.replace( + hour=hour if has_time else 0, minute=0, second=0, microsecond=0 + ) + timedelta(2) + trigger_dt = start_dt + delta + + await hass.services.async_call( + "input_datetime", + "set_datetime", + { + ATTR_ENTITY_ID: "input_datetime.trigger", + "datetime": str(start_dt.replace(tzinfo=None)), + }, + blocking=True, + ) + await hass.async_block_till_done() + + time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1) + + some_data = "{{ trigger.platform }}-{{ trigger.now.day }}-{{ trigger.now.hour }}-{{trigger.entity_id}}" + + freezer.move_to(dt_util.as_utc(time_that_will_not_match_right_away)) + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": { + "platform": "time", + "at": {"entity_id": "input_datetime.trigger", "offset": offset}, + }, + "action": { + "service": "test.automation", + "data_template": {"some": some_data}, + }, + } + }, + ) + await hass.async_block_till_done() + + async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) + await hass.async_block_till_done() + + assert len(service_calls) == 2 + assert ( + service_calls[1].data["some"] + == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" + ) + + @pytest.mark.parametrize( ("conf_at", "trigger_deltas"), [ @@ -654,10 +734,6 @@ def test_schema_valid(conf) -> None: {"platform": "time", "at": "binary_sensor.bla"}, {"platform": "time", "at": 745}, {"platform": "time", "at": "25:00"}, - { - "platform": "time", - "at": {"entity_id": "input_datetime.bla", "offset": "0:10"}, - }, {"platform": "time", "at": {"entity_id": "13:00:00", "offset": "0:10"}}, ], ) diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 25fa64b531f..5bef810150d 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -3,7 +3,7 @@ import datetime from unittest.mock import AsyncMock, patch -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest @@ -69,7 +69,7 @@ async def test_button_states_and_commands( await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == "2023-06-05T00:16:00+00:00" - getattr(mock_automower_client.commands, "error_confirm").side_effect = ApiException( + getattr(mock_automower_client.commands, "error_confirm").side_effect = ApiError( "Test error" ) with pytest.raises( @@ -111,7 +111,7 @@ async def test_sync_clock( await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == "2024-02-29T11:00:00+00:00" - mock_automower_client.commands.set_datetime.side_effect = ApiException("Test error") + mock_automower_client.commands.set_datetime.side_effect = 
ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index 627cd065e79..ec1fb7391b4 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -7,10 +7,10 @@ import time from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ( - ApiException, - AuthException, + ApiError, + AuthError, + HusqvarnaTimeoutError, HusqvarnaWSServerHandshakeError, - TimeoutException, ) from aioautomower.model import MowerAttributes, WorkArea from freezegun.api import FrozenDateTimeFactory @@ -111,8 +111,8 @@ async def test_expired_token_refresh_failure( @pytest.mark.parametrize( ("exception", "entry_state"), [ - (ApiException, ConfigEntryState.SETUP_RETRY), - (AuthException, ConfigEntryState.SETUP_ERROR), + (ApiError, ConfigEntryState.SETUP_RETRY), + (AuthError, ConfigEntryState.SETUP_ERROR), ], ) async def test_update_failed( @@ -142,7 +142,7 @@ async def test_update_failed( ), ( ["start_listening"], - TimeoutException, + HusqvarnaTimeoutError, "Failed to listen to websocket.", ), ], diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 3aca509e865..044989e5cf0 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import AsyncMock -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import MowerActivities, MowerAttributes, MowerStates from freezegun.api import FrozenDateTimeFactory import pytest @@ -82,7 +82,7 @@ async def test_lawn_mower_commands( getattr( mock_automower_client.commands, aioautomower_command - ).side_effect = ApiException("Test error") + ).side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", @@ -142,7 +142,7 @@ async def test_lawn_mower_service_commands( getattr( mock_automower_client.commands, aioautomower_command - ).side_effect = ApiException("Test error") + ).side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", @@ -196,7 +196,7 @@ async def test_lawn_mower_override_work_area_command( getattr( mock_automower_client.commands, aioautomower_command - ).side_effect = ApiException("Test error") + ).side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index e1f232e7b5c..55bf5dda7eb 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest @@ -40,7 +40,7 @@ async def test_number_commands( mocked_method = mock_automower_client.commands.set_cutting_height mocked_method.assert_called_once_with(TEST_MOWER_ID, 3) - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = 
ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", @@ -84,7 +84,7 @@ async def test_number_workarea_commands( assert state.state is not None assert state.state == "75" - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index 18d1b0ed21f..01e7607735b 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import HeadlightModes, MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest @@ -77,7 +77,7 @@ async def test_select_commands( mocked_method.assert_called_once_with(TEST_MOWER_ID, service.upper()) assert len(mocked_method.mock_calls) == 1 - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 100fd9fe3a4..48903a9630b 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -4,7 +4,7 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch import zoneinfo -from aioautomower.exceptions import ApiException +from aioautomower.exceptions import ApiError from aioautomower.model import MowerAttributes, MowerModes, Zone from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory @@ -92,7 +92,7 @@ async def test_switch_commands( mocked_method = getattr(mock_automower_client.commands, aioautomower_command) mocked_method.assert_called_once_with(TEST_MOWER_ID) - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", @@ -144,12 +144,12 @@ async def test_stay_out_zone_switch_commands( freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) async_fire_time_changed(hass) await hass.async_block_till_done() - mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_ZONE_ID, boolean) + mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_ZONE_ID, switch=boolean) state = hass.states.get(entity_id) assert state is not None assert state.state == excepted_state - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", @@ -207,7 +207,7 @@ async def test_work_area_switch_commands( assert state is not None assert state.state == excepted_state - mocked_method.side_effect = ApiException("Test error") + mocked_method.side_effect = ApiError("Test error") with pytest.raises( HomeAssistantError, match="Failed to send command: Test error", diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index 4d25fd5840b..cf723d885e1 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ 
b/tests/components/hydrawise/test_config_flow.py @@ -9,7 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.hydrawise.const import DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,16 +33,16 @@ async def test_form( assert result["step_id"] == "user" assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, ) mock_pydrawise.get_user.return_value = user await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Hydrawise" - assert result2["data"] == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "asdf@asdf.com" + assert result["data"] == { CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__", } @@ -69,14 +69,14 @@ async def test_form_api_error( mock_pydrawise.get_user.reset_mock(side_effect=True) mock_pydrawise.get_user.return_value = user - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) - assert result2["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result["type"] is FlowResultType.CREATE_ENTRY async def test_form_auth_connect_timeout( hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock ) -> None: - """Test we handle API errors.""" + """Test we handle connection timeout errors.""" mock_auth.token.side_effect = TimeoutError init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -90,8 +90,8 @@ async def test_form_auth_connect_timeout( assert result["errors"] == {"base": "timeout_connect"} mock_auth.token.reset_mock(side_effect=True) - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) - assert result2["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result["type"] is FlowResultType.CREATE_ENTRY async def test_form_client_connect_timeout( @@ -112,8 +112,8 @@ async def test_form_client_connect_timeout( mock_pydrawise.get_user.reset_mock(side_effect=True) mock_pydrawise.get_user.return_value = user - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) - assert result2["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result["type"] is FlowResultType.CREATE_ENTRY async def test_form_not_authorized_error( @@ -133,8 +133,8 @@ async def test_form_not_authorized_error( assert result["errors"] == {"base": "invalid_auth"} mock_auth.token.reset_mock(side_effect=True) - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) - assert result2["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result["type"] is FlowResultType.CREATE_ENTRY async def test_reauth( @@ -148,7 +148,8 @@ async def test_reauth( title="Hydrawise", domain=DOMAIN, data={ - CONF_API_KEY: "__api_key__", + CONF_USERNAME: "asdf@asdf.com", + CONF_PASSWORD: "bad-password", }, unique_id="hydrawise-12345", 
) @@ -160,14 +161,49 @@ async def test_reauth( flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 [result] = flows - assert result["step_id"] == "user" + assert result["step_id"] == "reauth_confirm" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, - ) mock_pydrawise.get_user.return_value = user + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_PASSWORD: "__password__"} + ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +async def test_reauth_fails( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User +) -> None: + """Test that the reauth flow handles API errors.""" + mock_config_entry = MockConfigEntry( + title="Hydrawise", + domain=DOMAIN, + data={ + CONF_USERNAME: "asdf@asdf.com", + CONF_PASSWORD: "bad-password", + }, + unique_id="hydrawise-12345", + ) + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["step_id"] == "reauth_confirm" + + mock_auth.token.side_effect = NotAuthorizedError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_PASSWORD: "__password__"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_auth.token.reset_mock(side_effect=True) + mock_pydrawise.get_user.return_value = user + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_PASSWORD: "__password__"} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index 3829c42d07f..aacfa886f52 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -18,6 +18,11 @@ MOCK_CONFIG = { "password": "verysecret", } +MOCK_CONFIG_DHCP = { + "username": "admin", + "password": "verysecret", +} + MOCK_HEATER_STATUS = { "display_code": DisplayCode.STANDBY, "display_text": "standby", diff --git a/tests/components/incomfort/test_config_flow.py b/tests/components/incomfort/test_config_flow.py index 9ab5a672d61..e3579182b3d 100644 --- a/tests/components/incomfort/test_config_flow.py +++ b/tests/components/incomfort/test_config_flow.py @@ -4,19 +4,33 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError -from incomfortclient import IncomfortError, InvalidHeaterList +from incomfortclient import InvalidGateway, InvalidHeaterList import pytest from homeassistant.components.incomfort.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .conftest import MOCK_CONFIG +from .conftest import MOCK_CONFIG, MOCK_CONFIG_DHCP from tests.common import MockConfigEntry +DHCP_SERVICE_INFO = DhcpServiceInfo( + hostname="rfgateway", + ip="192.168.1.12", + 
macaddress="0004A3DEADFF", +) + +DHCP_SERVICE_INFO_ALT = DhcpServiceInfo( + hostname="rfgateway", + ip="192.168.1.99", + macaddress="0004A3DEADFF", +) + async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_incomfort: MagicMock @@ -67,24 +81,22 @@ async def test_entry_already_configured( ("exc", "error", "base"), [ ( - IncomfortError(ClientResponseError(None, None, status=401)), + InvalidGateway, "auth_error", - CONF_PASSWORD, - ), - ( - IncomfortError(ClientResponseError(None, None, status=404)), - "not_found", "base", ), ( - IncomfortError(ClientResponseError(None, None, status=500)), + InvalidHeaterList, + "no_heaters", + "base", + ), + ( + ClientResponseError(None, None, status=500), "unknown", "base", ), - (IncomfortError, "unknown", "base"), - (ValueError, "unknown", "base"), (TimeoutError, "timeout_error", "base"), - (InvalidHeaterList, "no_heaters", "base"), + (ValueError, "unknown", "base"), ], ) async def test_form_validation( @@ -118,6 +130,139 @@ async def test_form_validation( assert "errors" not in result +async def test_dhcp_flow_simple( + hass: HomeAssistant, + mock_incomfort: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Test dhcp flow for older gateway without authentication needed. + + Assert on the creation of the gateway device, climate and boiler devices. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=DHCP_SERVICE_INFO + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "dhcp_confirm" + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {"host": "192.168.1.12"} + + config_entry: ConfigEntry = result["result"] + entry_id = config_entry.entry_id + + await hass.async_block_till_done(wait_background_tasks=True) + + # Check the gateway device is discovered + gateway_device = device_registry.async_get_device(identifiers={(DOMAIN, entry_id)}) + assert gateway_device is not None + assert gateway_device.name == "RFGateway" + assert gateway_device.manufacturer == "Intergas" + assert gateway_device.connections == {("mac", "00:04:a3:de:ad:ff")} + + devices = device_registry.devices.get_devices_for_config_entry_id(entry_id) + assert len(devices) == 3 + boiler_device = device_registry.async_get_device( + identifiers={(DOMAIN, "c0ffeec0ffee")} + ) + assert boiler_device.via_device_id == gateway_device.id + assert boiler_device is not None + climate_device = device_registry.async_get_device( + identifiers={(DOMAIN, "c0ffeec0ffee_1")} + ) + assert climate_device is not None + assert climate_device.via_device_id == gateway_device.id + + # Check the host is dynamically updated + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=DHCP_SERVICE_INFO_ALT + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert config_entry.data[CONF_HOST] == DHCP_SERVICE_INFO_ALT.ip + + +async def test_dhcp_flow_migrates_existing_entry_without_unique_id( + hass: HomeAssistant, + mock_incomfort: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test dhcp flow migrates an existing entry without unique_id.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + result = await 
hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_DHCP}, data=DHCP_SERVICE_INFO
+    )
+    await hass.async_block_till_done(wait_background_tasks=True)
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
+
+    # Check the gateway device is discovered after a reload
+    # and has updated connections
+    gateway_device = device_registry.async_get_device(
+        identifiers={(DOMAIN, mock_config_entry.entry_id)}
+    )
+    assert gateway_device is not None
+    assert gateway_device.name == "RFGateway"
+    assert gateway_device.manufacturer == "Intergas"
+    assert gateway_device.connections == {("mac", "00:04:a3:de:ad:ff")}
+
+    devices = device_registry.devices.get_devices_for_config_entry_id(
+        mock_config_entry.entry_id
+    )
+    assert len(devices) == 3
+    boiler_device = device_registry.async_get_device(
+        identifiers={(DOMAIN, "c0ffeec0ffee")}
+    )
+    assert boiler_device.via_device_id == gateway_device.id
+    assert boiler_device is not None
+    climate_device = device_registry.async_get_device(
+        identifiers={(DOMAIN, "c0ffeec0ffee_1")}
+    )
+    assert climate_device is not None
+    assert climate_device.via_device_id == gateway_device.id
+
+
+async def test_dhcp_flow_with_auth(
+    hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_incomfort: MagicMock
+) -> None:
+    """Test dhcp flow with authentication."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_DHCP}, data=DHCP_SERVICE_INFO
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "dhcp_confirm"
+
+    # Confirm with the correct host, but an auth error occurs
+    with patch.object(
+        mock_incomfort(),
+        "heaters",
+        side_effect=InvalidGateway,
+    ):
+        result = await hass.config_entries.flow.async_configure(
+            result["flow_id"], {CONF_HOST: "192.168.1.12"}
+        )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "dhcp_auth"
+    assert result["errors"] == {"base": "auth_error"}
+
+    # Submit the form with added credentials
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"], MOCK_CONFIG_DHCP
+    )
+    await hass.async_block_till_done()
+
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+    assert result["title"] == "Intergas InComfort/Intouch Lan2RF gateway"
+    assert result["data"] == MOCK_CONFIG
+    assert len(mock_setup_entry.mock_calls) == 1
+
+
 async def test_reauth_flow_success(
     hass: HomeAssistant,
     mock_incomfort: MagicMock,
@@ -153,14 +298,14 @@ async def test_reauth_flow_failure(
     with patch.object(
         mock_incomfort(),
         "heaters",
-        side_effect=IncomfortError(ClientResponseError(None, None, status=401)),
+        side_effect=InvalidGateway,
     ):
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"],
             user_input={CONF_PASSWORD: "incorrect-password"},
         )
     assert result["type"] is FlowResultType.FORM
-    assert result["errors"] == {CONF_PASSWORD: "auth_error"}
+    assert result["errors"] == {"base": "auth_error"}
 
     result = await hass.config_entries.flow.async_configure(
         result["flow_id"],
@@ -205,14 +350,14 @@ async def test_reconfigure_flow_failure(
     with patch.object(
         mock_incomfort(),
         "heaters",
-        side_effect=IncomfortError(ClientResponseError(None, None, status=401)),
+        side_effect=InvalidGateway,
     ):
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"],
             user_input=MOCK_CONFIG | {CONF_PASSWORD: "wrong-password"},
         )
     assert result["type"] is FlowResultType.FORM
-    assert result["errors"] == {CONF_PASSWORD: "auth_error"}
+    assert result["errors"] == 
{"base": "auth_error"} result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/incomfort/test_init.py b/tests/components/incomfort/test_init.py index f603c3ce27b..a9b3a8e4e3a 100644 --- a/tests/components/incomfort/test_init.py +++ b/tests/components/incomfort/test_init.py @@ -5,10 +5,9 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError, RequestInfo from freezegun.api import FrozenDateTimeFactory -from incomfortclient import IncomfortError +from incomfortclient import InvalidGateway, InvalidHeaterList import pytest -from homeassistant.components.incomfort import InvalidHeaterList from homeassistant.components.incomfort.coordinator import UPDATE_INTERVAL from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE @@ -66,20 +65,27 @@ async def test_coordinator_updates( @pytest.mark.parametrize( "exc", [ - IncomfortError(ClientResponseError(None, None, status=401)), - IncomfortError( - ClientResponseError( - RequestInfo( - url="http://example.com", - method="GET", - headers=[], - real_url="http://example.com", - ), - None, - status=500, - ) + ClientResponseError( + RequestInfo( + url="http://example.com", + method="GET", + headers=[], + real_url="http://example.com", + ), + None, + status=401, + ), + InvalidHeaterList, + ClientResponseError( + RequestInfo( + url="http://example.com", + method="GET", + headers=[], + real_url="http://example.com", + ), + None, + status=500, ), - IncomfortError(ValueError("some_error")), TimeoutError, ], ) @@ -113,30 +119,36 @@ async def test_coordinator_update_fails( ("exc", "config_entry_state"), [ ( - IncomfortError(ClientResponseError(None, None, status=401)), - ConfigEntryState.SETUP_ERROR, - ), - ( - IncomfortError(ClientResponseError(None, None, status=404)), + InvalidGateway, ConfigEntryState.SETUP_ERROR, ), (InvalidHeaterList, ConfigEntryState.SETUP_RETRY), ( - IncomfortError( - ClientResponseError( - RequestInfo( - url="http://example.com", - method="GET", - headers=[], - real_url="http://example.com", - ), - None, - status=500, - ) + ClientResponseError( + RequestInfo( + url="http://example.com", + method="GET", + headers=[], + real_url="http://example.com", + ), + None, + status=404, + ), + ConfigEntryState.SETUP_ERROR, + ), + ( + ClientResponseError( + RequestInfo( + url="http://example.com", + method="GET", + headers=[], + real_url="http://example.com", + ), + None, + status=500, ), ConfigEntryState.SETUP_RETRY, ), - (IncomfortError(ValueError("some_error")), ConfigEntryState.SETUP_RETRY), (TimeoutError, ConfigEntryState.SETUP_RETRY), ], ) diff --git a/tests/components/led_ble/test_config_flow.py b/tests/components/led_ble/test_config_flow.py index c22c62e2fb1..674700aebd9 100644 --- a/tests/components/led_ble/test_config_flow.py +++ b/tests/components/led_ble/test_config_flow.py @@ -3,6 +3,7 @@ from unittest.mock import patch from bleak import BleakError +from led_ble import CharacteristicMissingError from homeassistant import config_entries from homeassistant.components.led_ble.const import DOMAIN @@ -202,6 +203,35 @@ async def test_user_step_unknown_exception(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +async def test_user_step_not_supported(hass: HomeAssistant) -> None: + """Test user step with a non supported device.""" + with patch( + "homeassistant.components.led_ble.config_flow.async_discovered_service_info", + return_value=[LED_BLE_DISCOVERY_INFO], + ): + 
result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.led_ble.config_flow.LEDBLE.update", + side_effect=CharacteristicMissingError, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: LED_BLE_DISCOVERY_INFO.address, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "not_supported" + + async def test_bluetooth_step_success(hass: HomeAssistant) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/litterrobot/conftest.py b/tests/components/litterrobot/conftest.py index 181e4fc1a90..5cd97e5937d 100644 --- a/tests/components/litterrobot/conftest.py +++ b/tests/components/litterrobot/conftest.py @@ -9,10 +9,9 @@ from pylitterbot import Account, FeederRobot, LitterRobot3, LitterRobot4, Robot from pylitterbot.exceptions import InvalidCommandException import pytest -from homeassistant.components import litterrobot from homeassistant.core import HomeAssistant -from .common import CONFIG, FEEDER_ROBOT_DATA, ROBOT_4_DATA, ROBOT_DATA +from .common import CONFIG, DOMAIN, FEEDER_ROBOT_DATA, ROBOT_4_DATA, ROBOT_DATA from tests.common import MockConfigEntry @@ -117,16 +116,16 @@ def mock_account_with_side_effects() -> MagicMock: async def setup_integration( hass: HomeAssistant, mock_account: MagicMock, platform_domain: str | None = None ) -> MockConfigEntry: - """Load a Litter-Robot platform with the provided hub.""" + """Load a Litter-Robot platform with the provided coordinator.""" entry = MockConfigEntry( - domain=litterrobot.DOMAIN, - data=CONFIG[litterrobot.DOMAIN], + domain=DOMAIN, + data=CONFIG[DOMAIN], ) entry.add_to_hass(hass) with ( patch( - "homeassistant.components.litterrobot.hub.Account", + "homeassistant.components.litterrobot.coordinator.Account", return_value=mock_account, ), patch( diff --git a/tests/components/litterrobot/test_binary_sensor.py b/tests/components/litterrobot/test_binary_sensor.py index 69b3f7ce3ab..3fe72aef7e3 100644 --- a/tests/components/litterrobot/test_binary_sensor.py +++ b/tests/components/litterrobot/test_binary_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pytest from homeassistant.components.binary_sensor import ( - DOMAIN as PLATFORM_DOMAIN, + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, ) from homeassistant.const import ATTR_DEVICE_CLASS @@ -21,7 +21,7 @@ async def test_binary_sensors( mock_account: MagicMock, ) -> None: """Tests binary sensors.""" - await setup_integration(hass, mock_account, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account, BINARY_SENSOR_DOMAIN) state = hass.states.get("binary_sensor.test_sleeping") assert state.state == "off" diff --git a/tests/components/litterrobot/test_config_flow.py b/tests/components/litterrobot/test_config_flow.py index 9420d3cb8a8..2eadafb0d0c 100644 --- a/tests/components/litterrobot/test_config_flow.py +++ b/tests/components/litterrobot/test_config_flow.py @@ -6,7 +6,6 @@ from pylitterbot import Account from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginException from homeassistant import config_entries -from homeassistant.components import litterrobot from homeassistant.const import CONF_PASSWORD from homeassistant.core import 
HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -49,14 +48,14 @@ async def test_form(hass: HomeAssistant, mock_account) -> None: async def test_already_configured(hass: HomeAssistant) -> None: """Test we handle already configured.""" MockConfigEntry( - domain=litterrobot.DOMAIN, - data=CONFIG[litterrobot.DOMAIN], + domain=DOMAIN, + data=CONFIG[DOMAIN], ).add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=CONFIG[litterrobot.DOMAIN], + data=CONFIG[DOMAIN], ) assert result["type"] is FlowResultType.ABORT @@ -119,8 +118,8 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: """Test the reauth flow.""" entry = MockConfigEntry( - domain=litterrobot.DOMAIN, - data=CONFIG[litterrobot.DOMAIN], + domain=DOMAIN, + data=CONFIG[DOMAIN], ) entry.add_to_hass(hass) @@ -141,7 +140,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_PASSWORD: CONFIG[litterrobot.DOMAIN][CONF_PASSWORD]}, + user_input={CONF_PASSWORD: CONFIG[DOMAIN][CONF_PASSWORD]}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" @@ -151,8 +150,8 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> None: """Test the reauth flow fails and recovers.""" entry = MockConfigEntry( - domain=litterrobot.DOMAIN, - data=CONFIG[litterrobot.DOMAIN], + domain=DOMAIN, + data=CONFIG[DOMAIN], ) entry.add_to_hass(hass) @@ -167,7 +166,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_PASSWORD: CONFIG[litterrobot.DOMAIN][CONF_PASSWORD]}, + user_input={CONF_PASSWORD: CONFIG[DOMAIN][CONF_PASSWORD]}, ) assert result["type"] is FlowResultType.FORM @@ -185,7 +184,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_PASSWORD: CONFIG[litterrobot.DOMAIN][CONF_PASSWORD]}, + user_input={CONF_PASSWORD: CONFIG[DOMAIN][CONF_PASSWORD]}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/litterrobot/test_init.py b/tests/components/litterrobot/test_init.py index 1c8e0742b26..e42bdb048b7 100644 --- a/tests/components/litterrobot/test_init.py +++ b/tests/components/litterrobot/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, patch from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginException import pytest -from homeassistant.components import litterrobot from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_START, @@ -17,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .common import CONFIG, VACUUM_ENTITY_ID +from .common import CONFIG, DOMAIN, VACUUM_ENTITY_ID from .conftest import setup_integration from tests.common import MockConfigEntry @@ -57,13 +56,13 @@ async def test_entry_not_setup( ) -> None: """Test being able to handle config entry not setup.""" 
entry = MockConfigEntry( - domain=litterrobot.DOMAIN, - data=CONFIG[litterrobot.DOMAIN], + domain=DOMAIN, + data=CONFIG[DOMAIN], ) entry.add_to_hass(hass) with patch( - "homeassistant.components.litterrobot.hub.Account.connect", + "homeassistant.components.litterrobot.coordinator.Account.connect", side_effect=side_effect, ): await hass.config_entries.async_setup(entry.entry_id) @@ -91,7 +90,7 @@ async def test_device_remove_devices( dead_device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, - identifiers={(litterrobot.DOMAIN, "test-serial", "remove-serial")}, + identifiers={(DOMAIN, "test-serial", "remove-serial")}, ) response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) assert response["success"] diff --git a/tests/components/litterrobot/test_select.py b/tests/components/litterrobot/test_select.py index 48ec1bb06a5..b4902a56e63 100644 --- a/tests/components/litterrobot/test_select.py +++ b/tests/components/litterrobot/test_select.py @@ -8,7 +8,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN as PLATFORM_DOMAIN, + DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.const import ATTR_ENTITY_ID, EntityCategory @@ -26,7 +26,7 @@ async def test_wait_time_select( hass: HomeAssistant, mock_account, entity_registry: er.EntityRegistry ) -> None: """Tests the wait time select entity.""" - await setup_integration(hass, mock_account, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account, SELECT_DOMAIN) select = hass.states.get(SELECT_ENTITY_ID) assert select @@ -41,7 +41,7 @@ async def test_wait_time_select( data[ATTR_OPTION] = wait_time await hass.services.async_call( - PLATFORM_DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, data, blocking=True, @@ -52,7 +52,7 @@ async def test_wait_time_select( async def test_invalid_wait_time_select(hass: HomeAssistant, mock_account) -> None: """Tests the wait time select entity with invalid value.""" - await setup_integration(hass, mock_account, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account, SELECT_DOMAIN) select = hass.states.get(SELECT_ENTITY_ID) assert select @@ -61,7 +61,7 @@ async def test_invalid_wait_time_select(hass: HomeAssistant, mock_account) -> No with pytest.raises(ServiceValidationError): await hass.services.async_call( - PLATFORM_DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, data, blocking=True, @@ -75,7 +75,7 @@ async def test_panel_brightness_select( entity_registry: er.EntityRegistry, ) -> None: """Tests the wait time select entity.""" - await setup_integration(hass, mock_account_with_litterrobot_4, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account_with_litterrobot_4, SELECT_DOMAIN) select = hass.states.get(PANEL_BRIGHTNESS_ENTITY_ID) assert select @@ -94,7 +94,7 @@ async def test_panel_brightness_select( data[ATTR_OPTION] = option await hass.services.async_call( - PLATFORM_DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, data, blocking=True, diff --git a/tests/components/litterrobot/test_vacuum.py b/tests/components/litterrobot/test_vacuum.py index f18098ccf1d..0255e0e6a8a 100644 --- a/tests/components/litterrobot/test_vacuum.py +++ b/tests/components/litterrobot/test_vacuum.py @@ -8,11 +8,9 @@ from unittest.mock import MagicMock from pylitterbot import Robot import pytest -from homeassistant.components.litterrobot import DOMAIN from homeassistant.components.litterrobot.vacuum import SERVICE_SET_SLEEP_MODE from homeassistant.components.vacuum import ( - ATTR_STATUS, - DOMAIN 
as PLATFORM_DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_START, SERVICE_STOP, VacuumActivity, @@ -21,7 +19,7 @@ from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .common import VACUUM_ENTITY_ID +from .common import DOMAIN, VACUUM_ENTITY_ID from .conftest import setup_integration VACUUM_UNIQUE_ID = "LR3C012345-litter_box" @@ -37,45 +35,33 @@ async def test_vacuum( """Tests the vacuum entity was set up.""" entity_registry.async_get_or_create( - PLATFORM_DOMAIN, + VACUUM_DOMAIN, DOMAIN, VACUUM_UNIQUE_ID, - suggested_object_id=VACUUM_ENTITY_ID.replace(PLATFORM_DOMAIN, ""), + suggested_object_id=VACUUM_ENTITY_ID.replace(VACUUM_DOMAIN, ""), ) ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) assert ent_reg_entry.unique_id == VACUUM_UNIQUE_ID - await setup_integration(hass, mock_account, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account, VACUUM_DOMAIN) assert len(entity_registry.entities) == 1 assert hass.services.has_service(DOMAIN, SERVICE_SET_SLEEP_MODE) vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum assert vacuum.state == VacuumActivity.DOCKED - assert vacuum.attributes["is_sleeping"] is False ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) assert ent_reg_entry.unique_id == VACUUM_UNIQUE_ID -async def test_vacuum_status_when_sleeping( - hass: HomeAssistant, mock_account_with_sleeping_robot: MagicMock -) -> None: - """Tests the vacuum status when sleeping.""" - await setup_integration(hass, mock_account_with_sleeping_robot, PLATFORM_DOMAIN) - - vacuum = hass.states.get(VACUUM_ENTITY_ID) - assert vacuum - assert vacuum.attributes.get(ATTR_STATUS) == "Ready (Sleeping)" - - async def test_no_robots( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_account_with_no_robots: MagicMock, ) -> None: """Tests the vacuum entity was set up.""" - entry = await setup_integration(hass, mock_account_with_no_robots, PLATFORM_DOMAIN) + entry = await setup_integration(hass, mock_account_with_no_robots, VACUUM_DOMAIN) assert not hass.services.has_service(DOMAIN, SERVICE_SET_SLEEP_MODE) @@ -89,7 +75,7 @@ async def test_vacuum_with_error( hass: HomeAssistant, mock_account_with_error: MagicMock ) -> None: """Tests a vacuum entity with an error.""" - await setup_integration(hass, mock_account_with_error, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account_with_error, VACUUM_DOMAIN) vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum @@ -114,7 +100,7 @@ async def test_activities( expected_state: str, ) -> None: """Test sending commands to the switch.""" - await setup_integration(hass, mock_account_with_litterrobot_4, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account_with_litterrobot_4, VACUUM_DOMAIN) robot: Robot = mock_account_with_litterrobot_4.robots[0] robot._update_data(robot_data, partial=True) @@ -147,7 +133,7 @@ async def test_commands( issue_registry: ir.IssueRegistry, ) -> None: """Test sending commands to the vacuum.""" - await setup_integration(hass, mock_account, PLATFORM_DOMAIN) + await setup_integration(hass, mock_account, VACUUM_DOMAIN) vacuum = hass.states.get(VACUUM_ENTITY_ID) assert vacuum @@ -158,7 +144,7 @@ async def test_commands( issues = extra.get("issues", set()) await hass.services.async_call( - COMPONENT_SERVICE_DOMAIN.get(service, PLATFORM_DOMAIN), + COMPONENT_SERVICE_DOMAIN.get(service, VACUUM_DOMAIN), service, data, blocking=True, diff --git a/tests/components/lovelace/test_init.py 
b/tests/components/lovelace/test_init.py index 14d93d8302f..f56ff4371e6 100644 --- a/tests/components/lovelace/test_init.py +++ b/tests/components/lovelace/test_init.py @@ -12,16 +12,6 @@ from homeassistant.setup import async_setup_component from tests.typing import WebSocketGenerator -@pytest.fixture -def mock_onboarding_not_done() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=False, - ) as mock_onboarding: - yield mock_onboarding - - @pytest.fixture def mock_onboarding_done() -> Generator[MagicMock]: """Mock that Home Assistant is currently onboarding.""" @@ -32,15 +22,6 @@ def mock_onboarding_done() -> Generator[MagicMock]: yield mock_onboarding -@pytest.fixture -def mock_add_onboarding_listener() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_add_listener", - ) as mock_add_onboarding_listener: - yield mock_add_onboarding_listener - - async def test_create_dashboards_when_onboarded( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -57,42 +38,3 @@ async def test_create_dashboards_when_onboarded( response = await client.receive_json() assert response["success"] assert response["result"] == [] - - -async def test_create_dashboards_when_not_onboarded( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], - mock_add_onboarding_listener, - mock_onboarding_not_done, -) -> None: - """Test we automatically create dashboards when not onboarded.""" - client = await hass_ws_client(hass) - - assert await async_setup_component(hass, "lovelace", {}) - - # Call onboarding listener - mock_add_onboarding_listener.mock_calls[0][1][1]() - await hass.async_block_till_done() - - # List dashboards - await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == [ - { - "icon": "mdi:map", - "id": "map", - "mode": "storage", - "require_admin": False, - "show_in_sidebar": True, - "title": "Map", - "url_path": "map", - } - ] - - # List map dashboard config - await client.send_json_auto_id({"type": "lovelace/config", "url_path": "map"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == {"strategy": {"type": "map"}} diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index bbafec48e10..4e078f86939 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -104,6 +104,7 @@ async def integration_fixture( "pressure_sensor", "room_airconditioner", "silabs_dishwasher", + "silabs_laundrywasher", "smoke_detector", "switch_unit", "temperature_sensor", diff --git a/tests/components/matter/fixtures/nodes/silabs_laundrywasher.json b/tests/components/matter/fixtures/nodes/silabs_laundrywasher.json new file mode 100644 index 00000000000..4d26dfb03aa --- /dev/null +++ b/tests/components/matter/fixtures/nodes/silabs_laundrywasher.json @@ -0,0 +1,909 @@ +{ + "node_id": 29, + "date_commissioned": "2024-10-19T19:49:36.900186", + "last_interview": "2024-10-20T09:26:38.517535", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [ + 29, 31, 40, 42, 43, 44, 45, 48, 49, 50, 51, 52, 53, 60, 62, 63, 64, 65 + ], + "0/29/2": [41], + "0/29/3": [1, 2], + 
"0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 4 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Silabs", + "0/40/2": 65521, + "0/40/3": "LaundryWasher", + "0/40/4": 32773, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "", + "0/40/16": false, + "0/40/18": "DC840FF79F5DBFCE", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, + 65528, 65529, 65531, 65532, 65533 + ], + "0/42/0": [], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/43/0": "en-US", + "0/43/1": [ + "en-US", + "de-DE", + "fr-FR", + "en-GB", + "es-ES", + "zh-CN", + "it-IT", + "ja-JP" + ], + "0/43/65532": 0, + "0/43/65533": 1, + "0/43/65528": [], + "0/43/65529": [], + "0/43/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "0/44/0": 0, + "0/44/1": 0, + "0/44/2": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 7], + "0/44/65532": 0, + "0/44/65533": 1, + "0/44/65528": [], + "0/44/65529": [], + "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/45/0": 1, + "0/45/65532": 0, + "0/45/65533": 1, + "0/45/65528": [], + "0/45/65529": [], + "0/45/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "p0jbsOzJRNw=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "p0jbsOzJRNw=", + "0/49/7": null, + "0/49/9": 10, + "0/49/10": 4, + "0/49/65532": 2, + "0/49/65533": 2, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533 + ], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [ + { + "0": "MyHome", + "1": true, + "2": null, + "3": null, + "4": "GstaSerJSho=", + "5": [], + "6": [ + "/cS6oCynAAGilSC/p+bVSg==", + "/QANuACgAAAAAAD//gDIAA==", + "/QANuACgAABL3TOUNF1NGw==", + "/oAAAAAAAAAYy1pJ6slKGg==" + ], + "7": 4 + } + ], + "0/51/1": 10, + "0/51/2": 1934, + "0/51/3": 17, + "0/51/4": 6, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/52/0": [ + { + "0": 8, + "1": "shell", + "3": 324 + }, + { + "0": 3, + "1": "UART", + "3": 127 + }, + { + "0": 2, + "1": "OT Stack", + "3": 719 + }, + { + "0": 9, + "1": "LaundryW", + "3": 767 + }, + { + "0": 12, + "1": "Bluetoot", + "3": 
174 + }, + { + "0": 1, + "1": "Bluetoot", + "3": 294 + }, + { + "0": 11, + "1": "Bluetoot", + "3": 216 + }, + { + "0": 6, + "1": "Tmr Svc", + "3": 586 + }, + { + "0": 5, + "1": "IDLE", + "3": 264 + }, + { + "0": 7, + "1": "CHIP", + "3": 699 + } + ], + "0/52/1": 99808, + "0/52/2": 17592, + "0/52/3": 4294959166, + "0/52/65532": 1, + "0/52/65533": 1, + "0/52/65528": [], + "0/52/65529": [0], + "0/52/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "MyHome", + "0/53/3": 4660, + "0/53/4": 12054125955590472924, + "0/53/5": "QP0ADbgAoAAA", + "0/53/6": 0, + "0/53/7": [ + { + "0": 7100699097952925053, + "1": 23, + "2": 15360, + "3": 222256, + "4": 71507, + "5": 2, + "6": -83, + "7": -90, + "8": 56, + "9": 3, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 9656160343072683744, + "1": 16, + "2": 17408, + "3": 211448, + "4": 95936, + "5": 3, + "6": -53, + "7": -59, + "8": 0, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 5926511551178228101, + "1": 0, + "2": 19456, + "3": 420246, + "4": 89821, + "5": 3, + "6": -57, + "7": -56, + "8": 0, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 3029834005214616809, + "1": 8, + "2": 22528, + "3": 125241, + "4": 91286, + "5": 3, + "6": -73, + "7": -81, + "8": 0, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 17459145101989614194, + "1": 7, + "2": 26624, + "3": 1426216, + "4": 36884, + "5": 3, + "6": -39, + "7": -39, + "8": 34, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 17503311195895696084, + "1": 30, + "2": 29696, + "3": 577028, + "4": 98083, + "5": 2, + "6": -84, + "7": -85, + "8": 65, + "9": 20, + "10": true, + "11": true, + "12": true, + "13": false + }, + { + "0": 8241705229565301122, + "1": 19, + "2": 57344, + "3": 488092, + "4": 55364, + "5": 3, + "6": -48, + "7": -48, + "8": 1, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false + } + ], + "0/53/8": [ + { + "0": 7100699097952925053, + "1": 15360, + "2": 15, + "3": 22, + "4": 1, + "5": 2, + "6": 2, + "7": 23, + "8": true, + "9": true + }, + { + "0": 9656160343072683744, + "1": 17408, + "2": 17, + "3": 19, + "4": 1, + "5": 3, + "6": 3, + "7": 16, + "8": true, + "9": true + }, + { + "0": 5926511551178228101, + "1": 19456, + "2": 19, + "3": 17, + "4": 1, + "5": 3, + "6": 3, + "7": 0, + "8": true, + "9": true + }, + { + "0": 3029834005214616809, + "1": 22528, + "2": 22, + "3": 17, + "4": 1, + "5": 3, + "6": 3, + "7": 8, + "8": true, + "9": true + }, + { + "0": 17459145101989614194, + "1": 26624, + "2": 26, + "3": 17, + "4": 1, + "5": 3, + "6": 3, + "7": 7, + "8": true, + "9": true + }, + { + "0": 17503311195895696084, + "1": 29696, + "2": 29, + "3": 26, + "4": 1, + "5": 2, + "6": 2, + "7": 30, + "8": true, + "9": true + }, + { + "0": 0, + "1": 51200, + "2": 50, + "3": 63, + "4": 0, + "5": 0, + "6": 0, + "7": 0, + "8": true, + "9": false + }, + { + "0": 8241705229565301122, + "1": 57344, + "2": 56, + "3": 17, + "4": 1, + "5": 3, + "6": 3, + "7": 19, + "8": true, + "9": true + } + ], + "0/53/9": 1348153998, + "0/53/10": 68, + "0/53/11": 49, + "0/53/12": 120, + "0/53/13": 56, + "0/53/14": 1, + "0/53/15": 0, + "0/53/16": 1, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 1, + "0/53/20": 0, + "0/53/21": 0, + "0/53/22": 18798, + "0/53/23": 18683, + "0/53/24": 115, + "0/53/25": 18699, + "0/53/26": 18492, + "0/53/27": 115, + "0/53/28": 18814, + "0/53/29": 0, + "0/53/30": 0, + "0/53/31": 0, + 
"0/53/32": 0, + "0/53/33": 15745, + "0/53/34": 207, + "0/53/35": 0, + "0/53/36": 71, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 7183, + "0/53/40": 6295, + "0/53/41": 886, + "0/53/42": 6140, + "0/53/43": 0, + "0/53/44": 0, + "0/53/45": 0, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 0, + "0/53/49": 1041, + "0/53/50": 0, + "0/53/51": 2, + "0/53/52": 0, + "0/53/53": 0, + "0/53/54": 0, + "0/53/55": 0, + "0/53/56": 65536, + "0/53/57": 0, + "0/53/58": 0, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//4A==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [], + "0/53/65532": 15, + "0/53/65533": 2, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [ + { + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRHRgkBwEkCAEwCUEEJu8N93WFULw4vts483kDAExYc3VhKuaWdmpdJnF5pDcls+y34i6RfchubiU77BJq8zo9VGn6J59mVROTzKgr0DcKNQEoARgkAgE2AwQCBAEYMAQUbJ+53QmsxXf2iP0oL4td/BQFi0gwBRRT9HTfU5Nds+HA8j+/MRP+0pVyIxgwC0BFzpzN0Z0DdN+oPUwK87jzZ8amzJxWlmbnW/Q+j1Z4ziWsFy3yLAsgKYL4nOexZZSqvlEvzMhpstndmh1eGYZfGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEyT62Yt4qMI+MorlmQ/Hxh2CpLetznVknlAbhvYAwTexpSxp9GnhR09SrcUhz3mOb0eZa2TylqcnPBhHJ2Ih2RTcKNQEpARgkAmAwBBRT9HTfU5Nds+HA8j+/MRP+0pVyIzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQI/Kc38hQyK7AkT7/pN4hiYW3LoWKT3NA43+ssMJoVpDcaZ989GXBQKIbHKbBEXzUQ1J8wfL7l2pL0Z8Lso9JwgY", + "254": 4 + } + ], + "0/62/1": [ + { + "1": "BIrruNo7r0gX6j6lq1dDi5zeK3jxcTavjt2o4adCCSCYtbxOakfb7C3GXqgV4LzulFSinbewmYkdqFBHqm5pxvU=", + "2": 4939, + "3": 2, + "4": 29, + "5": "", + "254": 4 + } + ], + "0/62/2": 5, + "0/62/3": 4, + "0/62/4": [ + "FTABAQAkAgE3AyYUyakYCSYVj6gLsxgmBP2G+CskBQA3BiYUyakYCSYVj6gLsxgkBwEkCAEwCUEEgYwxrTB+tyiEGfrRwjlXTG34MiQtJXbg5Qqd0ohdRW7MfwYY7vZiX/0h9hI8MqUralFaVPcnghAP0MSJm1YrqTcKNQEpARgkAmAwBBS3BS9aJzt+p6i28Nj+trB2Uu+vdzAFFLcFL1onO36nqLbw2P62sHZS7693GDALQIrLt7Uq3S9HEe7apdzYSR+j3BLWNXSTLWD4YbrdyYLpm6xqHDV/NPARcIp4skZdtz91WwFBDfuS4jO5aVoER1sY", + "FTABAQAkAgE3AycUQhmZbaIbYjokFQIYJgRWZLcqJAUANwYnFEIZmW2iG2I6JBUCGCQHASQIATAJQQT2AlKGW/kOMjqayzeO0md523/fuhrhGEUU91uQpTiKo0I7wcPpKnmrwfQNPX6g0kEQl+VGaXa3e22lzfu5Tzp0Nwo1ASkBGCQCYDAEFOOMk13ScMKuT2hlaydi1yEJnhTqMAUU44yTXdJwwq5PaGVrJ2LXIQmeFOoYMAtAv2jJd1qd5miXbYesH1XrJ+vgyY0hzGuZ78N6Jw4Cb1oN1sLSpA+PNM0u7+hsEqcSvvn2eSV8EaRR+hg5YQjHDxg=", + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEE0j40vjcb6ZsmtBR/I0rB3ZIfAA8lPeWCTxG7nPSbNpepe18XwLidhFIHKmvtZWDZ3Hl3MM9NBB+LAZlCFq/edjcKNQEpARgkAmAwBBS7EfW886qYxvWeWjpA/G/CjDuwEDAFFLsR9bzzqpjG9Z5aOkD8b8KMO7AQGDALQIgQgt5asUGXO0ZyTWWKdjAmBSoJAzRMuD4Z+tQYZanQ3s0OItL07MU2In6uyXhjNBfjJlRqon780lhjTsm2Y+8Y", + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEiuu42juvSBfqPqWrV0OLnN4rePFxNq+O3ajhp0IJIJi1vE5qR9vsLcZeqBXgvO6UVKKdt7CZiR2oUEeqbmnG9TcKNQEpARgkAmAwBBTjAjvCZO2QpJyarhRj7T8yYjarAzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQE7hTxTRg92QOxwA1hK3xv8DaxvxL71r6ZHcNRzug9wNnonJ+NC84SFKvKDxwcBxHYqFdIyDiDgwJNTQIBgasmIY" + ], + "0/62/5": 4, + 
"0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/64/0": [ + { + "0": "room", + "1": "bedroom 2" + }, + { + "0": "orientation", + "1": "North" + }, + { + "0": "floor", + "1": "2" + }, + { + "0": "direction", + "1": "up" + } + ], + "0/64/65532": 0, + "0/64/65533": 1, + "0/64/65528": [], + "0/64/65529": [], + "0/64/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/65/0": [], + "0/65/65532": 0, + "0/65/65533": 1, + "0/65/65528": [], + "0/65/65529": [], + "0/65/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 115, + "1": 1 + } + ], + "1/29/1": [3, 29, 30, 81, 83, 86, 96], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/30/0": [], + "1/30/65532": 0, + "1/30/65533": 1, + "1/30/65528": [], + "1/30/65529": [], + "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/81/0": null, + "1/81/1": null, + "1/81/65532": null, + "1/81/65533": 2, + "1/81/65528": [1], + "1/81/65529": [0], + "1/81/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/83/0": ["Off", "Low", "Medium", "High"], + "1/83/1": 0, + "1/83/2": 0, + "1/83/3": [1, 2], + "1/83/65532": 3, + "1/83/65533": 1, + "1/83/65528": [], + "1/83/65529": [], + "1/83/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/86/0": 0, + "1/86/1": 0, + "1/86/2": 0, + "1/86/3": 0, + "1/86/4": 1, + "1/86/5": ["Cold", "Colors", "Whites"], + "1/86/65532": 2, + "1/86/65533": 1, + "1/86/65528": [], + "1/86/65529": [0], + "1/86/65531": [4, 5, 65528, 65529, 65531, 65532, 65533], + "1/96/0": null, + "1/96/1": null, + "1/96/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + } + ], + "1/96/4": 0, + "1/96/5": { + "0": 0 + }, + "1/96/65532": 0, + "1/96/65533": 1, + "1/96/65528": [4], + "1/96/65529": [0, 1, 2, 3], + "1/96/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "2/29/0": [ + { + "0": 1296, + "1": 1 + } + ], + "2/29/1": [29, 144, 145, 156], + "2/29/2": [], + "2/29/3": [], + "2/29/65532": 0, + "2/29/65533": 2, + "2/29/65528": [], + "2/29/65529": [], + "2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "2/144/0": 2, + "2/144/1": 3, + "2/144/2": [ + { + "0": 5, + "1": true, + "2": -50000000, + "3": 50000000, + "4": [ + { + "0": -50000000, + "1": -10000000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -9999999, + "1": 9999999, + "2": 1000, + "3": 100, + "4": 500 + }, + { + "0": 10000000, + "1": 50000000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + }, + { + "0": 2, + "1": true, + "2": -100000, + "3": 100000, + "4": [ + { + "0": -100000, + "1": -5000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -4999, + "1": 4999, + "2": 1000, + "3": 100, + "4": 500 + }, + { + "0": 5000, + "1": 100000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + }, + { + "0": 1, + "1": true, + "2": -500000, + "3": 500000, + "4": [ + { + "0": -500000, + "1": -100000, + "2": 5000, + "3": 2000, + "4": 3000 + }, + { + "0": -99999, + "1": 99999, + "2": 
1000, + "3": 100, + "4": 500 + }, + { + "0": 100000, + "1": 500000, + "2": 5000, + "3": 2000, + "4": 3000 + } + ] + } + ], + "2/144/3": [ + { + "0": 0, + "1": 0, + "2": 300, + "7": 129, + "8": 129, + "9": 129, + "10": 129 + }, + { + "0": 1, + "1": 0, + "2": 500, + "7": 129, + "8": 129, + "9": 129, + "10": 129 + }, + { + "0": 2, + "1": 0, + "2": 1000, + "7": 129, + "8": 129, + "9": 129, + "10": 129 + } + ], + "2/144/4": 120000, + "2/144/5": 0, + "2/144/6": 0, + "2/144/7": 0, + "2/144/8": 0, + "2/144/9": 0, + "2/144/10": 0, + "2/144/11": 120000, + "2/144/12": 0, + "2/144/13": 0, + "2/144/14": 60, + "2/144/15": [ + { + "0": 1, + "1": 100000 + } + ], + "2/144/16": [ + { + "0": 1, + "1": 100000 + } + ], + "2/144/17": 9800, + "2/144/18": 0, + "2/144/65532": 31, + "2/144/65533": 1, + "2/144/65528": [], + "2/144/65529": [], + "2/144/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 65528, + 65529, 65531, 65532, 65533 + ], + "2/145/0": { + "0": 14, + "1": true, + "2": 0, + "3": 1000000000000000, + "4": [ + { + "0": 0, + "1": 1000000000000000, + "2": 500, + "3": 50 + } + ] + }, + "2/145/1": { + "0": 0, + "1": 1900, + "2": 1936, + "3": 1900222, + "4": 1936790 + }, + "2/145/5": { + "0": 0, + "1": 0, + "2": 0, + "3": 0 + }, + "2/145/65532": 5, + "2/145/65533": 1, + "2/145/65528": [], + "2/145/65529": [], + "2/145/65531": [0, 1, 5, 65528, 65529, 65531, 65532, 65533], + "2/156/0": [0, 1, 2], + "2/156/1": null, + "2/156/65532": 12, + "2/156/65533": 1, + "2/156/65528": [], + "2/156/65529": [], + "2/156/65531": [0, 1, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/snapshots/test_button.ambr b/tests/components/matter/snapshots/test_button.ambr index 10792b58d28..bcba0da808e 100644 --- a/tests/components/matter/snapshots/test_button.ambr +++ b/tests/components/matter/snapshots/test_button.ambr @@ -2340,6 +2340,237 @@ 'state': 'unknown', }) # --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.laundrywasher_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'LaundryWasher Identify', + }), + 'context': , + 'entity_id': 'button.laundrywasher_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.laundrywasher_pause', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Pause', + }), + 'context': , + 'entity_id': 'button.laundrywasher_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_resume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.laundrywasher_resume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Resume', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'resume', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-OperationalStateResumeButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_resume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Resume', + }), + 'context': , + 'entity_id': 'button.laundrywasher_resume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.laundrywasher_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-OperationalStateStartButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Start', + }), + 'context': , + 'entity_id': 'button.laundrywasher_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.laundrywasher_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-OperationalStateStopButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_laundrywasher][button.laundrywasher_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Stop', + }), + 'context': , + 'entity_id': 'button.laundrywasher_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_buttons[smoke_detector][button.smoke_sensor_identify-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr index 9d51bb92e51..7e06b6f501d 100644 --- a/tests/components/matter/snapshots/test_number.ambr +++ b/tests/components/matter/snapshots/test_number.ambr @@ -388,6 +388,63 @@ 'state': '1.0', }) # --- +# name: test_numbers[eve_thermo][number.eve_thermo_temperature_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 25, + 'min': -25, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.eve_thermo_temperature_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature offset', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_offset', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-EveTemperatureOffset-513-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[eve_thermo][number.eve_thermo_temperature_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eve Thermo Temperature offset', + 'max': 25, + 'min': -25, + 'mode': , + 'step': 0.5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.eve_thermo_temperature_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/snapshots/test_select.ambr b/tests/components/matter/snapshots/test_select.ambr index 663b0cdaf51..19a90503086 100644 --- a/tests/components/matter/snapshots/test_select.ambr +++ b/tests/components/matter/snapshots/test_select.ambr @@ -546,6 +546,61 @@ 'state': 'previous', }) # --- +# name: test_selects[eve_thermo][select.eve_thermo_temperature_display_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Celsius', + 'Fahrenheit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.eve_thermo_temperature_display_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature display mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_display_mode', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-TrvTemperatureDisplayMode-516-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[eve_thermo][select.eve_thermo_temperature_display_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Temperature display mode', + 'options': list([ + 'Celsius', + 'Fahrenheit', + ]), + }), + 'context': , + 'entity_id': 'select.eve_thermo_temperature_display_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Celsius', + }) +# --- # name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1514,6 +1569,114 @@ 'state': 'unknown', }) # --- +# name: test_selects[silabs_laundrywasher][select.laundrywasher_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.laundrywasher_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-MatterLaundryWasherMode-81-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[silabs_laundrywasher][select.laundrywasher_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Mode', + 'options': list([ + ]), + }), + 'context': , + 'entity_id': 'select.laundrywasher_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_selects[silabs_laundrywasher][select.laundrywasher_temperature_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Cold', + 'Colors', + 'Whites', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.laundrywasher_temperature_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_level', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-TemperatureControlSelectedTemperatureLevel-86-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[silabs_laundrywasher][select.laundrywasher_temperature_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LaundryWasher Temperature level', + 'options': list([ + 'Cold', + 'Colors', + 'Whites', + ]), + }), + 'context': , + 'entity_id': 
'select.laundrywasher_temperature_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Colors', + }) +# --- # name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1573,6 +1736,61 @@ 'state': 'previous', }) # --- +# name: test_selects[thermostat][select.longan_link_hvac_temperature_display_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Celsius', + 'Fahrenheit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.longan_link_hvac_temperature_display_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature display mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_display_mode', + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-TrvTemperatureDisplayMode-516-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[thermostat][select.longan_link_hvac_temperature_display_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Longan link HVAC Temperature display mode', + 'options': list([ + 'Celsius', + 'Fahrenheit', + ]), + }), + 'context': , + 'entity_id': 'select.longan_link_hvac_temperature_display_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Celsius', + }) +# --- # name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index f88604e7d46..205cba68d7c 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1145,98 +1145,6 @@ 'state': '189.0', }) # --- -# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_door_lock_battery_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery type', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_replacement_description', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock Battery type', - }), - 'context': , - 'entity_id': 'sensor.mock_door_lock_battery_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- -# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.mock_door_lock_battery_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery type', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_replacement_description', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Door Lock Battery type', - }), - 'context': , - 'entity_id': 'sensor.mock_door_lock_battery_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- # name: test_sensors[eve_contact_sensor][sensor.eve_door_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1288,52 +1196,6 @@ 'state': '100', }) # --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_door_battery_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery type', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_replacement_description', - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve Door Battery type', - }), - 'context': , - 'entity_id': 'sensor.eve_door_battery_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- # name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1883,52 +1745,6 @@ 'state': '100', }) # --- -# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_thermo_battery_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery type', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_replacement_description', - 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensors[eve_thermo][sensor.eve_thermo_battery_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve Thermo Battery type', - }), - 'context': , - 'entity_id': 'sensor.eve_thermo_battery_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- # name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2081,52 +1897,6 @@ 'state': '100', }) # --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.eve_weather_battery_type', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Battery type', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_replacement_description', - 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Eve Weather Battery type', - }), - 'context': , - 'entity_id': 'sensor.eve_weather_battery_type', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '', - }) -# --- # name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2941,6 +2711,294 @@ 'state': '120.0', }) # --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.laundrywasher_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-ElectricalPowerMeasurementActiveCurrent-144-5', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'LaundryWasher Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.laundrywasher_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.laundrywasher_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-ElectricalEnergyMeasurementCumulativeEnergyImported-145-1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'LaundryWasher Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.laundrywasher_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_operational_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.laundrywasher_operational_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Operational state', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operational_state', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-OperationalState-96-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_operational_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'LaundryWasher Operational state', + 'options': list([ + 'stopped', + 'running', + 'paused', + 'error', + ]), + }), + 'context': , + 'entity_id': 'sensor.laundrywasher_operational_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.laundrywasher_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-ElectricalPowerMeasurementWatt-144-8', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'LaundryWasher Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.laundrywasher_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.laundrywasher_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-ElectricalPowerMeasurementVoltage-144-4', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[silabs_laundrywasher][sensor.laundrywasher_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'LaundryWasher Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.laundrywasher_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120.0', + }) +# --- # name: test_sensors[smoke_detector][sensor.smoke_sensor_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/snapshots/test_switch.ambr b/tests/components/matter/snapshots/test_switch.ambr index 9396dccd245..612e81580a5 100644 --- a/tests/components/matter/snapshots/test_switch.ambr +++ b/tests/components/matter/snapshots/test_switch.ambr @@ -187,6 +187,52 @@ 'state': 'off', }) # --- +# name: test_switches[eve_thermo][switch.eve_thermo_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.eve_thermo_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-EveTrvChildLock-516-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[eve_thermo][switch.eve_thermo_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Child lock', + }), + 'context': , + 'entity_id': 'switch.eve_thermo_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-entry] EntityRegistryEntrySnapshot({ 'aliases': 
set({ diff --git a/tests/components/matter/test_select.py index ffe996fd840..3643aa83fca 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -103,3 +103,44 @@ async def test_attribute_select_entities( await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.state == "unknown" + + +@pytest.mark.parametrize("node_fixture", ["silabs_laundrywasher"]) +async def test_list_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test ListSelect entities are discovered and working from a laundrywasher fixture.""" + state = hass.states.get("select.laundrywasher_temperature_level") + assert state + assert state.state == "Colors" + assert state.attributes["options"] == ["Cold", "Colors", "Whites"] + # Change temperature_level + set_node_attribute(matter_node, 1, 86, 4, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("select.laundrywasher_temperature_level") + assert state.state == "Cold" + # test select option + await hass.services.async_call( + "select", + "select_option", + { + "entity_id": "select.laundrywasher_temperature_level", + "option": "Whites", + }, + blocking=True, + ) + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.TemperatureControl.Commands.SetTemperature( + targetTemperatureLevel=2 + ), + ) + # test that an invalid value (e.g. 253) leads to an unknown state + set_node_attribute(matter_node, 1, 86, 4, 253) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("select.laundrywasher_temperature_level") + assert state.state == "unknown" diff --git a/tests/components/matter/test_sensor.py index 3215ec58116..630809a957d 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -193,6 +193,12 @@ async def test_battery_sensor_description( assert state assert state.state == "CR2032" + + # case with an empty string to check that the attribute is indeed ignored + set_node_attribute(matter_node, 1, 47, 19, "") + await trigger_subscription_callback(hass, matter_client) + + assert hass.states.get("sensor.smoke_sensor_battery_type") is None + @pytest.mark.parametrize("node_fixture", ["eve_thermo"]) async def test_eve_thermo_sensor( diff --git a/tests/components/matter/test_switch.py index d7a6a700cde..11451c715c3 100644 --- a/tests/components/matter/test_switch.py +++ b/tests/components/matter/test_switch.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock, call from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode +from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest from syrupy import SnapshotAssertion @@ -110,3 +111,57 @@ async def test_power_switch(hass: HomeAssistant, matter_node: MatterNode) -> Non assert state assert state.state == "off" assert state.attributes["friendly_name"] == "Room AirConditioner Power" + + +@pytest.mark.parametrize("node_fixture", ["eve_thermo"]) +async def test_numeric_switch( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test numeric switch entity is discovered and working using an Eve Thermo fixture.""" + state
= hass.states.get("switch.eve_thermo_child_lock") + assert state + assert state.state == "off" + # name should be derived from description attribute + assert state.attributes["friendly_name"] == "Eve Thermo Child lock" + # test attribute changes + set_node_attribute(matter_node, 1, 516, 1, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("switch.eve_thermo_child_lock") + assert state.state == "on" + set_node_attribute(matter_node, 1, 516, 1, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("switch.eve_thermo_child_lock") + assert state.state == "off" + # test switch service + await hass.services.async_call( + "switch", + "turn_on", + {"entity_id": "switch.eve_thermo_child_lock"}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args_list[0] == call( + node_id=matter_node.node_id, + attribute_path=create_attribute_path_from_attribute( + endpoint_id=1, + attribute=clusters.ThermostatUserInterfaceConfiguration.Attributes.KeypadLockout, + ), + value=1, + ) + await hass.services.async_call( + "switch", + "turn_off", + {"entity_id": "switch.eve_thermo_child_lock"}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 2 + assert matter_client.write_attribute.call_args_list[1] == call( + node_id=matter_node.node_id, + attribute_path=create_attribute_path_from_attribute( + endpoint_id=1, + attribute=clusters.ThermostatUserInterfaceConfiguration.Attributes.KeypadLockout, + ), + value=0, + ) diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index 1520e4478c6..b5bc9b02808 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -394,7 +394,7 @@ async def test_hvac_onoff_values(hass: HomeAssistant, mock_modbus) -> None: ) await hass.async_block_till_done() - mock_modbus.write_register.assert_called_with(11, 0xAA, slave=10) + mock_modbus.write_register.assert_called_with(11, value=0xAA, slave=10) await hass.services.async_call( CLIMATE_DOMAIN, @@ -404,7 +404,7 @@ async def test_hvac_onoff_values(hass: HomeAssistant, mock_modbus) -> None: ) await hass.async_block_till_done() - mock_modbus.write_register.assert_called_with(11, 0xFF, slave=10) + mock_modbus.write_register.assert_called_with(11, value=0xFF, slave=10) @pytest.mark.parametrize( diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 5dd3f6e9033..e105818d193 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -846,6 +846,13 @@ async def test_pb_service_write( CALL_TYPE_WRITE_REGISTERS: mock_modbus_with_pymodbus.write_registers, } + value_arg_name = { + CALL_TYPE_WRITE_COIL: "value", + CALL_TYPE_WRITE_COILS: "values", + CALL_TYPE_WRITE_REGISTER: "value", + CALL_TYPE_WRITE_REGISTERS: "values", + } + data = { ATTR_HUB: TEST_MODBUS_NAME, do_slave: 17, @@ -858,10 +865,12 @@ async def test_pb_service_write( func_name[do_write[FUNC]].return_value = do_return[VALUE] await hass.services.async_call(DOMAIN, do_write[SERVICE], data, blocking=True) assert func_name[do_write[FUNC]].called - assert func_name[do_write[FUNC]].call_args[0] == ( - data[ATTR_ADDRESS], - data[do_write[DATA]], - ) + assert func_name[do_write[FUNC]].call_args.args == (data[ATTR_ADDRESS],) + assert func_name[do_write[FUNC]].call_args.kwargs == { + "slave": 17, + value_arg_name[do_write[FUNC]]: data[do_write[DATA]], + } + if do_return[DATA]: assert 
any(message.startswith("Pymodbus:") for message in caplog.messages) @@ -1265,3 +1274,56 @@ async def test_no_entities(hass: HomeAssistant) -> None: ] } assert await async_setup_component(hass, DOMAIN, config) is False + + +@pytest.mark.parametrize( + ("do_config", "expected_slave_value"), + [ + ( + { + CONF_SENSORS: [ + { + CONF_NAME: "dummy", + CONF_ADDRESS: 1234, + }, + ], + }, + 1, + ), + ( + { + CONF_SENSORS: [ + { + CONF_NAME: "dummy", + CONF_ADDRESS: 1234, + CONF_SLAVE: 0, + }, + ], + }, + 0, + ), + ( + { + CONF_SENSORS: [ + { + CONF_NAME: "dummy", + CONF_ADDRESS: 1234, + CONF_DEVICE_ADDRESS: 6, + }, + ], + }, + 6, + ), + ], +) +async def test_check_default_slave( + hass: HomeAssistant, + mock_modbus, + do_config, + mock_do_cycle, + expected_slave_value: int, +) -> None: + """Test default slave.""" + assert mock_modbus.read_holding_registers.mock_calls + first_call = mock_modbus.read_holding_registers.mock_calls[0] + assert first_call.kwargs["slave"] == expected_slave_value diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 0a61ab05f21..795a89e7e13 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -822,7 +822,7 @@ "parameterUnit": "", "writable": false, "timestamp": "2024-02-08T19:13:05+00:00", - "value": 30, + "value": 31, "strVal": "Heating", "smartHomeCategories": [], "minValue": null, diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 6fe6becff11..521823e282d 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -883,7 +883,7 @@ "parameterUnit": "", "writable": false, "timestamp": "2024-02-08T19:13:05+00:00", - "value": 30, + "value": 31, "strVal": "Heating", "smartHomeCategories": [], "minValue": null, @@ -2045,7 +2045,7 @@ "parameterUnit": "", "writable": false, "timestamp": "2024-02-08T19:13:05+00:00", - "value": 30, + "value": 31, "strVal": "Heating", "smartHomeCategories": [], "minValue": null, diff --git a/tests/components/myuplink/snapshots/test_sensor.ambr b/tests/components/myuplink/snapshots/test_sensor.ambr index a5469dc9a77..34acbbb8785 100644 --- a/tests/components/myuplink/snapshots/test_sensor.ambr +++ b/tests/components/myuplink/snapshots/test_sensor.ambr @@ -3396,7 +3396,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Heating', + 'state': 'unknown', }) # --- # name: test_sensor_states[sensor.gotham_city_priority_2-entry] @@ -3462,7 +3462,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Heating', + 'state': 'unknown', }) # --- # name: test_sensor_states[sensor.gotham_city_priority_raw-entry] @@ -3508,7 +3508,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '31', }) # --- # name: test_sensor_states[sensor.gotham_city_priority_raw_2-entry] @@ -3554,7 +3554,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '31', }) # --- # name: test_sensor_states[sensor.gotham_city_r_start_diff_additional_heat-entry] diff --git a/tests/components/network/test_system_health.py b/tests/components/network/test_system_health.py new file mode 100644 index 00000000000..eb383aafde7 --- /dev/null +++ b/tests/components/network/test_system_health.py @@ -0,0 +1,32 @@ +"""Test network system health.""" + 
+import asyncio + +import pytest + +from homeassistant.components.network.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import get_system_health_info + + +@pytest.mark.usefixtures("mock_socket_no_loopback") +async def test_network_system_health(hass: HomeAssistant) -> None: + """Test network system health.""" + + assert await async_setup_component(hass, "system_health", {}) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + info = await get_system_health_info(hass, "network") + + for key, val in info.items(): + if asyncio.iscoroutine(val): + info[key] = await val + + assert info == { + "adapters": "eth0 (disabled), lo0 (disabled), eth1 (enabled, default, auto), vtun0 (disabled)", + "announce_addresses": "192.168.1.5", + "ipv4_addresses": "eth0 (), lo0 (127.0.0.1/8), eth1 (192.168.1.5/23), vtun0 (169.254.3.2/16)", + "ipv6_addresses": "eth0 (2001:db8::/8), lo0 (), eth1 (), vtun0 ()", + } diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py index 9a196a5b231..3d3db730d08 100644 --- a/tests/components/ohme/conftest.py +++ b/tests/components/ohme/conftest.py @@ -54,6 +54,8 @@ def mock_client(): client.status = ChargerStatus.CHARGING client.power = ChargerPower(0, 0, 0, 0) + client.target_soc = 50 + client.target_time = (8, 0) client.battery = 80 client.serial = "chargerid" client.ct_connected = True diff --git a/tests/components/ohme/snapshots/test_number.ambr b/tests/components/ohme/snapshots/test_number.ambr new file mode 100644 index 00000000000..580082635df --- /dev/null +++ b/tests/components/ohme/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_numbers[number.ohme_home_pro_target_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.ohme_home_pro_target_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Target percentage', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'target_percentage', + 'unique_id': 'chargerid_target_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_numbers[number.ohme_home_pro_target_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Target percentage', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.ohme_home_pro_target_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- diff --git a/tests/components/ohme/snapshots/test_time.ambr b/tests/components/ohme/snapshots/test_time.ambr new file mode 100644 index 00000000000..4d9fab20e0b --- /dev/null +++ b/tests/components/ohme/snapshots/test_time.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_time[time.ohme_home_pro_target_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'time', + 'entity_category': None, + 'entity_id': 'time.ohme_home_pro_target_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Target time', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'target_time', + 'unique_id': 'chargerid_target_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_time[time.ohme_home_pro_target_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Target time', + }), + 'context': , + 'entity_id': 'time.ohme_home_pro_target_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '08:00:00', + }) +# --- diff --git a/tests/components/ohme/test_number.py b/tests/components/ohme/test_number.py new file mode 100644 index 00000000000..9cfce2a850f --- /dev/null +++ b/tests/components/ohme/test_number.py @@ -0,0 +1,55 @@ +"""Tests for numbers.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_numbers( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme sensors.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_set_number( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the number set.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ + ATTR_VALUE: 100, + }, + target={ + ATTR_ENTITY_ID: "number.ohme_home_pro_target_percentage", + }, + blocking=True, + ) + + assert len(mock_client.async_set_target.mock_calls) == 1 diff --git a/tests/components/ohme/test_time.py b/tests/components/ohme/test_time.py new file mode 100644 index 00000000000..0562dfa124c --- /dev/null +++ b/tests/components/ohme/test_time.py @@ -0,0 +1,55 @@ +"""Tests for time.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_time( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme sensors.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.TIME]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_set_time( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the time set.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + service_data={ + ATTR_TIME: "00:00:00", + }, + target={ + ATTR_ENTITY_ID: "time.ohme_home_pro_target_time", + }, + blocking=True, + ) + + assert len(mock_client.async_set_target.mock_calls) == 1 diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index 865bc1a6bbf..203cc22cf95 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -10,6 +10,7 @@ from homeassistant.components.onkyo import InputSource from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow from homeassistant.components.onkyo.const import ( DOMAIN, + OPTION_INPUT_SOURCES, OPTION_MAX_VOLUME, OPTION_VOLUME_RESOLUTION, ) @@ -87,35 +88,6 @@ async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> assert host_result["errors"]["base"] == "cannot_connect" -async def test_ssdp_discovery_already_configured( - hass: HomeAssistant, default_mock_discovery -) -> None: - """Test SSDP discovery with already configured device.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_HOST: "192.168.1.100"}, - unique_id="id1", - ) - config_entry.add_to_hass(hass) - - discovery_info = SsdpServiceInfo( - ssdp_location="http://192.168.1.100:8080", - upnp={ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, - ssdp_usn="uuid:mock_usn", - ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", - ssdp_st="mock_st", - ) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_SSDP}, - data=discovery_info, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - async def test_manual_valid_host_unexpected_error( hass: HomeAssistant, empty_mock_discovery ) -> None: @@ -262,6 +234,35 @@ async def test_ssdp_discovery_success( assert select_result["result"].unique_id == "id1" +async def test_ssdp_discovery_already_configured( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with already configured device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.1.100"}, + unique_id="id1", + ) + config_entry.add_to_hass(hass) + + discovery_info = SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_ssdp_discovery_host_info_error(hass: 
HomeAssistant) -> None: """Test SSDP discovery with host info error.""" discovery_info = SsdpServiceInfo( @@ -466,7 +467,7 @@ async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: await setup_integration(hass, config_entry, receiver_info) old_host = config_entry.data[CONF_HOST] - old_max_volume = config_entry.options[OPTION_MAX_VOLUME] + old_options = config_entry.options result = await config_entry.start_reconfigure_flow(hass) @@ -483,7 +484,7 @@ async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], - user_input={"volume_resolution": 200, "input_sources": ["TUNER"]}, + user_input={OPTION_VOLUME_RESOLUTION: 200}, ) assert result3["type"] is FlowResultType.ABORT @@ -491,7 +492,10 @@ async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: assert config_entry.data[CONF_HOST] == old_host assert config_entry.options[OPTION_VOLUME_RESOLUTION] == 200 - assert config_entry.options[OPTION_MAX_VOLUME] == old_max_volume + for option, option_value in old_options.items(): + if option == OPTION_VOLUME_RESOLUTION: + continue + assert config_entry.options[option] == option_value async def test_reconfigure_new_device(hass: HomeAssistant) -> None: @@ -610,8 +614,8 @@ async def test_import_success( "ignore_translations", [ [ # The schema is dynamically created from input sources - "component.onkyo.options.step.init.data.TV", - "component.onkyo.options.step.init.data_description.TV", + "component.onkyo.options.step.names.sections.input_sources.data.TV", + "component.onkyo.options.step.names.sections.input_sources.data_description.TV", ] ], ) @@ -622,23 +626,43 @@ async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) config_entry = create_empty_config_entry() await setup_integration(hass, config_entry, receiver_info) + old_volume_resolution = config_entry.options[OPTION_VOLUME_RESOLUTION] + result = await hass.config_entries.options.async_init(config_entry.entry_id) - await hass.async_block_till_done() result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "max_volume": 42, - "TV": "television", + OPTION_MAX_VOLUME: 42, + OPTION_INPUT_SOURCES: [], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] == {OPTION_INPUT_SOURCES: "empty_input_source_list"} + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + OPTION_MAX_VOLUME: 42, + OPTION_INPUT_SOURCES: ["TV"], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "names" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + OPTION_INPUT_SOURCES: {"TV": "television"}, }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { - "volume_resolution": 80, - "max_volume": 42.0, - "input_sources": { - "12": "television", - }, + OPTION_VOLUME_RESOLUTION: old_volume_resolution, + OPTION_MAX_VOLUME: 42.0, + OPTION_INPUT_SOURCES: {"12": "television"}, } diff --git a/tests/components/onvif/test_parsers.py b/tests/components/onvif/test_parsers.py index 16172112c11..4f7e10abae6 100644 --- a/tests/components/onvif/test_parsers.py +++ b/tests/components/onvif/test_parsers.py @@ -426,6 +426,82 @@ async def test_tapo_tpsmartevent_person(hass: HomeAssistant) -> None: ) +async def 
test_tapo_tpsmartevent_pet(hass: HomeAssistant) -> None: + """Tests tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent - pet.""" + event = await get_event( + { + "SubscriptionReference": { + "Address": { + "_value_1": "http://192.168.56.63:2020/event-0_2020", + "_attr_1": None, + }, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Topic": { + "_value_1": "tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": { + "Address": { + "_value_1": "http://192.168.56.63:5656/event", + "_attr_1": None, + }, + "ReferenceParameters": None, + "Metadata": None, + "_value_1": None, + "_attr_1": None, + }, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [ + { + "Name": "VideoSourceConfigurationToken", + "Value": "vsconf", + }, + { + "Name": "VideoAnalyticsConfigurationToken", + "Value": "VideoAnalyticsToken", + }, + {"Name": "Rule", "Value": "MyTPSmartEventDetectorRule"}, + ], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "IsPet", "Value": "true"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime( + 2025, 1, 22, 13, 24, 57, tzinfo=datetime.UTC + ), + "PropertyOperation": "Changed", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Pet Detection" + assert event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == ( + f"{TEST_UID}_tns1:RuleEngine/TPSmartEventDetector/" + "TPSmartEvent_VideoSourceToken_VideoAnalyticsToken_MyTPSmartEventDetectorRule" + ) + + async def test_tapo_cellmotiondetector_person(hass: HomeAssistant) -> None: """Tests tns1:RuleEngine/CellMotionDetector/People - person.""" event = await get_event( diff --git a/tests/components/overseerr/snapshots/test_event.ambr b/tests/components/overseerr/snapshots/test_event.ambr index 9bf23efb8f6..1002bc4cdad 100644 --- a/tests/components/overseerr/snapshots/test_event.ambr +++ b/tests/components/overseerr/snapshots/test_event.ambr @@ -44,7 +44,7 @@ # name: test_entities[event.overseerr_last_media_event-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'comment': None, + 'entity_picture': 'https://image.tmdb.org/t/p/w600_and_h900_bestv2/something.jpg', 'event_type': 'auto_approved', 'event_types': list([ 'pending', @@ -55,19 +55,16 @@ 'auto_approved', ]), 'friendly_name': 'Overseerr Last media event', - 'image': 'https://image.tmdb.org/t/p/w600_and_h900_bestv2/something.jpg', - 'issue': None, 'media': dict({ 'media_type': 'movie', - 'status': 'PENDING', - 'status4k': 'UNKNOWN', - 'tmdb_id': '123', - 'tvdb_id': '', + 'status': 'pending', + 'status4k': 'unknown', + 'tmdb_id': 123, + 'tvdb_id': None, }), 'message': 'Here is an interesting Linux ISO that was automatically approved.', - 'notification_type': 'MEDIA_AUTO_APPROVED', 'request': dict({ - 'request_id': '16', + 'request_id': 16, 'requested_by_avatar': 'https://plex.tv/users/abc/avatar?c=123', 'requested_by_email': 'my@email.com', 'requested_by_settings_discord_id': '123', diff --git a/tests/components/overseerr/test_config_flow.py b/tests/components/overseerr/test_config_flow.py index 487c843ff1c..6a3b086a8e2 100644 --- a/tests/components/overseerr/test_config_flow.py +++ b/tests/components/overseerr/test_config_flow.py @@ -3,7 +3,10 @@ from unittest.mock import AsyncMock, patch 
import pytest -from python_overseerr.exceptions import OverseerrConnectionError +from python_overseerr.exceptions import ( + OverseerrAuthenticationError, + OverseerrConnectionError, +) from homeassistant.components.overseerr.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -61,13 +64,22 @@ async def test_full_flow( } +@pytest.mark.parametrize( + ("exception", "error"), + [ + (OverseerrAuthenticationError, "invalid_auth"), + (OverseerrConnectionError, "cannot_connect"), + ], +) async def test_flow_errors( hass: HomeAssistant, mock_overseerr_client: AsyncMock, mock_setup_entry: AsyncMock, + exception: Exception, + error: str, ) -> None: """Test flow errors.""" - mock_overseerr_client.get_request_count.side_effect = OverseerrConnectionError() + mock_overseerr_client.get_request_count.side_effect = exception result = await hass.config_entries.flow.async_init( DOMAIN, @@ -82,7 +94,7 @@ async def test_flow_errors( ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} + assert result["errors"] == {"base": error} mock_overseerr_client.get_request_count.side_effect = None @@ -143,3 +155,148 @@ async def test_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_overseerr_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "new-test-key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.data[CONF_API_KEY] == "new-test-key" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (OverseerrAuthenticationError, "invalid_auth"), + (OverseerrConnectionError, "cannot_connect"), + ], +) +async def test_reauth_flow_errors( + hass: HomeAssistant, + mock_overseerr_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_overseerr_client.get_request_count.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "new-test-key"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_overseerr_client.get_request_count.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "new-test-key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.data[CONF_API_KEY] == "new-test-key" + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_overseerr_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert 
result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://overseerr2.test", CONF_API_KEY: "new-key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data == { + CONF_HOST: "overseerr2.test", + CONF_PORT: 80, + CONF_SSL: False, + CONF_API_KEY: "new-key", + CONF_WEBHOOK_ID: WEBHOOK_ID, + } + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (OverseerrAuthenticationError, "invalid_auth"), + (OverseerrConnectionError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_errors( + hass: HomeAssistant, + mock_overseerr_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reconfigure flow errors.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + mock_overseerr_client.get_request_count.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://overseerr2.test", CONF_API_KEY: "new-key"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_overseerr_client.get_request_count.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://overseerr2.test", CONF_API_KEY: "new-key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/overseerr/test_event.py b/tests/components/overseerr/test_event.py index 7ad6b53c7ed..3866ccc09ca 100644 --- a/tests/components/overseerr/test_event.py +++ b/tests/components/overseerr/test_event.py @@ -107,3 +107,64 @@ async def test_event_goes_unavailable( assert ( hass.states.get("event.overseerr_last_media_event").state == STATE_UNAVAILABLE ) + + +async def test_not_push_based( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_overseerr_client_needs_change: AsyncMock, +) -> None: + """Test event entities aren't created if not push based.""" + + mock_overseerr_client_needs_change.test_webhook_notification_config.return_value = ( + False + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("event.overseerr_last_media_event") is None + + +async def test_cant_fetch_webhook_config( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_overseerr_client: AsyncMock, +) -> None: + """Test event entities aren't created if not push based.""" + + mock_overseerr_client.get_webhook_notification_config.side_effect = ( + OverseerrConnectionError("Boom") + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("event.overseerr_last_media_event") is None + + +async def test_not_push_based_but_was_before( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_overseerr_client_needs_change: AsyncMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test event entities are created if push based in the past.""" + + entity_registry.async_get_or_create( + Platform.EVENT, + DOMAIN, + f"{mock_config_entry.entry_id}-media", + suggested_object_id="overseerr_last_media_event", + disabled_by=None, + ) + + 
mock_overseerr_client_needs_change.test_webhook_notification_config.return_value = ( + False + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("event.overseerr_last_media_event") is not None + + assert ( + hass.states.get("event.overseerr_last_media_event").state == STATE_UNAVAILABLE + ) diff --git a/tests/components/overseerr/test_init.py b/tests/components/overseerr/test_init.py index 4c6897ed316..6418e2103db 100644 --- a/tests/components/overseerr/test_init.py +++ b/tests/components/overseerr/test_init.py @@ -4,16 +4,19 @@ from typing import Any from unittest.mock import AsyncMock, patch import pytest +from python_overseerr import OverseerrAuthenticationError, OverseerrConnectionError from python_overseerr.models import WebhookNotificationOptions from syrupy import SnapshotAssertion from homeassistant.components import cloud +from homeassistant.components.cloud import CloudNotAvailable from homeassistant.components.overseerr import ( CONF_CLOUDHOOK_URL, JSON_PAYLOAD, REGISTERED_NOTIFICATIONS, ) from homeassistant.components.overseerr.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -23,6 +26,28 @@ from tests.common import MockConfigEntry from tests.components.cloud import mock_cloud +@pytest.mark.parametrize( + ("exception", "config_entry_state"), + [ + (OverseerrAuthenticationError, ConfigEntryState.SETUP_ERROR), + (OverseerrConnectionError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_initialization_errors( + hass: HomeAssistant, + mock_overseerr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + config_entry_state: ConfigEntryState, +) -> None: + """Test the Overseerr integration initialization errors.""" + mock_overseerr_client.get_request_count.side_effect = exception + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state == config_entry_state + + async def test_device_info( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -338,10 +363,50 @@ async def test_cloudhook_not_connecting( len( mock_overseerr_client_needs_change.test_webhook_notification_config.mock_calls ) - == 2 + == 3 ) mock_overseerr_client_needs_change.set_webhook_notification_config.assert_not_called() assert hass.config_entries.async_entries(DOMAIN) fake_create_cloudhook.assert_not_called() + + +async def test_removing_entry_with_cloud_unavailable( + hass: HomeAssistant, + mock_cloudhook_config_entry: MockConfigEntry, + mock_overseerr_client: AsyncMock, +) -> None: + """Test handling cloud unavailable when deleting entry.""" + + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch("homeassistant.components.cloud.async_is_connected", return_value=True), + patch.object(cloud, "async_active_subscription", return_value=True), + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + side_effect=CloudNotAvailable(), + ), + ): + await setup_integration(hass, mock_cloudhook_config_entry) + + assert cloud.async_active_subscription(hass) is True + + await hass.async_block_till_done() + assert hass.config_entries.async_entries(DOMAIN) + + for 
config_entry in hass.config_entries.async_entries(DOMAIN): + await hass.config_entries.async_remove(config_entry.entry_id) + + await hass.async_block_till_done() + assert not hass.config_entries.async_entries(DOMAIN) diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index e33a2f557de..fbcdcfbaff5 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -51,10 +51,8 @@ 'Hostname': 'PBLR-0000645', 'HwFixedCableRating': 20, 'HwFwCompat': 'wlac-2', - 'HwHas4pRelay': False, 'HwHasBop': True, 'HwHasBuzzer': True, - 'HwHasDualSocket': False, 'HwHasEichrechtLaserMarking': False, 'HwHasEthernet': True, 'HwHasLed': True, @@ -64,13 +62,11 @@ 'HwHasPlc': False, 'HwHasRfid': True, 'HwHasRs485': True, - 'HwHasShutter': False, 'HwHasSocket': False, 'HwHasTpm': False, 'HwHasWlan': True, 'HwMaxCurrent': 16, 'HwOneOrThreePhase': 3, - 'HwUKCompliant': False, 'MainboardPn': '6004-2300-7600', 'MainboardSn': '23-38-A4E-2MC', 'MeterCalIGainA': 267369, @@ -86,7 +82,6 @@ 'MeterCalVGainB': 246074, 'MeterCalVGainC': 230191, 'MeterFwIdent': 'b9cbcd', - 'NorFlash': 'True', 'ProductModelName': 'WLAC1-H11R0WE0ICR00', 'ProductPn': '6004-2300-8002', 'ProductSn': '23-45-A4O-MOF', diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr index da17a4661ee..bb1a3eb34d6 100644 --- a/tests/components/peblar/snapshots/test_sensor.ambr +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -302,7 +302,7 @@ 'installation_limit', 'local_modbus_api', 'local_rest_api', - 'local_scheduled', + 'local_scheduled_charging', 'ocpp_smart_charging', 'overcurrent_protection', 'phase_imbalance', @@ -354,7 +354,7 @@ 'installation_limit', 'local_modbus_api', 'local_rest_api', - 'local_scheduled', + 'local_scheduled_charging', 'ocpp_smart_charging', 'overcurrent_protection', 'phase_imbalance', diff --git a/tests/components/ping/test_helpers.py b/tests/components/ping/test_helpers.py new file mode 100644 index 00000000000..5a90c6b75b2 --- /dev/null +++ b/tests/components/ping/test_helpers.py @@ -0,0 +1,59 @@ +"""Test the exception handling in subprocess version of async_ping.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.ping.helpers import PingDataSubProcess +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +class MockAsyncSubprocess: + """Minimal mock implementation of asyncio.subprocess.Process for exception testing.""" + + def __init__(self, killsig=ProcessLookupError, **kwargs) -> None: + """Store provided exception type for later.""" + self.killsig = killsig + + async def communicate(self) -> None: + """Fails immediately with a timeout.""" + raise TimeoutError + + async def kill(self) -> None: + """Raise preset exception when called.""" + raise self.killsig + + +@pytest.mark.parametrize("exc", [TypeError, ProcessLookupError]) +async def test_async_ping_expected_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + exc: Exception, +) -> None: + """Test PingDataSubProcess.async_ping handles expected exceptions.""" + with patch( + "asyncio.create_subprocess_exec", return_value=MockAsyncSubprocess(killsig=exc) + ): + # Actual parameters irrelevant, as subprocess will not be created + ping = PingDataSubProcess(hass, host="10.10.10.10", 
count=3, privileged=False) + assert await ping.async_ping() is None + + +async def test_async_ping_unexpected_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test PingDataSubProcess.async_ping does not suppress unexpected exceptions.""" + with patch( + "asyncio.create_subprocess_exec", + return_value=MockAsyncSubprocess(killsig=KeyboardInterrupt), + ): + # Actual parameters irrelevant, as subprocess will not be created + ping = PingDataSubProcess(hass, host="10.10.10.10", count=3, privileged=False) + with pytest.raises(KeyboardInterrupt): + assert await ping.async_ping() is None diff --git a/tests/components/powerwall/test_init.py b/tests/components/powerwall/test_init.py index e271cde0fc4..dd70dbb7c65 100644 --- a/tests/components/powerwall/test_init.py +++ b/tests/components/powerwall/test_init.py @@ -1,17 +1,23 @@ """Tests for the PowerwallDataManager.""" import datetime -from unittest.mock import patch +from http.cookies import Morsel +from unittest.mock import MagicMock, patch +from aiohttp import CookieJar from tesla_powerwall import AccessDeniedError, LoginResponse -from homeassistant.components.powerwall.const import DOMAIN +from homeassistant.components.powerwall.const import ( + AUTH_COOKIE_KEY, + CONFIG_ENTRY_COOKIE, + DOMAIN, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .mocks import _mock_powerwall_with_fixtures +from .mocks import MOCK_GATEWAY_DIN, _mock_powerwall_with_fixtures from tests.common import MockConfigEntry, async_fire_time_changed @@ -37,7 +43,11 @@ async def test_update_data_reauthenticate_on_access_denied(hass: HomeAssistant) mock_powerwall.is_authenticated.return_value = True config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_IP_ADDRESS: "1.2.3.4", CONF_PASSWORD: "password"} + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "password", + }, ) config_entry.add_to_hass(hass) with ( @@ -72,3 +82,226 @@ async def test_update_data_reauthenticate_on_access_denied(hass: HomeAssistant) assert len(flows) == 1 reauth_flow = flows[0] assert reauth_flow["context"]["source"] == "reauth" + + +async def test_init_uses_cookie_if_present(hass: HomeAssistant) -> None: + """Tests if the init will use the auth cookie if present. + + If the cookie is present, the login step will be skipped and info will be fetched directly (see _login_and_fetch_base_info). 
+ """ + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + CONFIG_ENTRY_COOKIE: "somecookie", + }, + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert not mock_powerwall.login.called + assert mock_powerwall.get_gateway_din.called + + +async def test_init_uses_password_if_no_cookies(hass: HomeAssistant) -> None: + """Tests if the init will use the password if no auth cookie present.""" + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + }, + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + mock_powerwall.login.assert_called_with("somepassword") + assert mock_powerwall.get_charge.called + + +async def test_init_saves_the_cookie(hass: HomeAssistant) -> None: + """Tests that the cookie is properly saved.""" + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + mock_jar = MagicMock(CookieJar) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + }, + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + patch("homeassistant.components.powerwall.CookieJar", return_value=mock_jar), + ): + auth_cookie = Morsel() + auth_cookie.set(AUTH_COOKIE_KEY, "somecookie", "somecookie") + mock_jar.__iter__.return_value = [auth_cookie] + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data[CONFIG_ENTRY_COOKIE] == "somecookie" + + +async def test_retry_ignores_cookie(hass: HomeAssistant) -> None: + """Tests that retrying uses the password instead.""" + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + CONFIG_ENTRY_COOKIE: "somecookie", + }, + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert not mock_powerwall.login.called + assert mock_powerwall.get_gateway_din.called + + mock_powerwall.login.reset_mock() + mock_powerwall.get_charge.reset_mock() + + mock_powerwall.get_charge.side_effect = [AccessDeniedError("test"), 90.0] + + async_fire_time_changed(hass, utcnow() + datetime.timedelta(minutes=1)) + await hass.async_block_till_done() + + 
mock_powerwall.login.assert_called_with("somepassword") + assert mock_powerwall.get_charge.call_count == 2 + + +async def test_reauth_ignores_and_clears_cookie(hass: HomeAssistant) -> None: + """Tests that the reauth flow uses password and clears the cookie.""" + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + CONFIG_ENTRY_COOKIE: "somecookie", + }, + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + mock_powerwall.login.reset_mock() + mock_powerwall.get_charge.reset_mock() + + mock_powerwall.get_charge.side_effect = [ + AccessDeniedError("test"), + AccessDeniedError("test"), + ] + + async_fire_time_changed(hass, utcnow() + datetime.timedelta(minutes=1)) + await hass.async_block_till_done() + + mock_powerwall.login.assert_called_with("somepassword") + assert mock_powerwall.get_charge.call_count == 2 + + flows = hass.config_entries.flow.async_progress(DOMAIN) + assert len(flows) == 1 + reauth_flow = flows[0] + assert reauth_flow["context"]["source"] == "reauth" + + mock_powerwall.login.reset_mock() + assert config_entry.data[CONFIG_ENTRY_COOKIE] is not None + + await hass.config_entries.flow.async_configure( + reauth_flow["flow_id"], {CONF_PASSWORD: "somepassword"} + ) + + mock_powerwall.login.assert_called_with("somepassword") + assert config_entry.data[CONFIG_ENTRY_COOKIE] is None + + +async def test_init_retries_with_password(hass: HomeAssistant) -> None: + """Tests that the init retries with password if cookie fails.""" + mock_powerwall = await _mock_powerwall_with_fixtures(hass) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.2.3.4", + CONF_PASSWORD: "somepassword", + CONFIG_ENTRY_COOKIE: "somecookie", + }, + ) + config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.powerwall.config_flow.Powerwall", + return_value=mock_powerwall, + ), + patch( + "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall + ), + ): + mock_powerwall.get_gateway_din.side_effect = [ + AccessDeniedError("get_gateway_din"), + MOCK_GATEWAY_DIN, + ] + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + mock_powerwall.login.assert_called_with("somepassword") + assert mock_powerwall.get_gateway_din.call_count == 2 diff --git a/tests/components/recorder/auto_repairs/events/test_schema.py b/tests/components/recorder/auto_repairs/events/test_schema.py index cae181a6270..91f5bd50298 100644 --- a/tests/components/recorder/auto_repairs/events/test_schema.py +++ b/tests/components/recorder/auto_repairs/events/test_schema.py @@ -8,12 +8,12 @@ from homeassistant.core import HomeAssistant from ...common import async_wait_recording_done -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -22,7 +22,7 @@ async def mock_recorder_before_hass( @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async 
def test_validate_db_schema_fix_float_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -58,7 +58,7 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_event_data( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -91,7 +91,7 @@ async def test_validate_db_schema_fix_utf8_issue_event_data( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, diff --git a/tests/components/recorder/auto_repairs/states/test_schema.py b/tests/components/recorder/auto_repairs/states/test_schema.py index 915ac1f3500..982a6a732b6 100644 --- a/tests/components/recorder/auto_repairs/states/test_schema.py +++ b/tests/components/recorder/auto_repairs/states/test_schema.py @@ -8,12 +8,12 @@ from homeassistant.core import HomeAssistant from ...common import async_wait_recording_done -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -22,7 +22,7 @@ async def mock_recorder_before_hass( @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -60,7 +60,7 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_states( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -92,7 +92,7 @@ async def test_validate_db_schema_fix_utf8_issue_states( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_state_attributes( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -125,7 +125,7 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py index 9e287d13594..78a7ddaa300 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py 
+++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py @@ -22,12 +22,12 @@ import homeassistant.util.dt as dt_util from ...common import async_wait_recording_done from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -134,7 +134,7 @@ def _create_engine_28(*args, **kwargs): @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" @@ -242,7 +242,7 @@ async def test_delete_metadata_duplicates( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates_many( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index 34a075afbc7..352a2345052 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -8,12 +8,12 @@ from homeassistant.core import HomeAssistant from ...common import async_wait_recording_done -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -22,7 +22,7 @@ async def mock_recorder_before_hass( @pytest.mark.parametrize("enable_schema_validation", [True]) async def test_validate_db_schema_fix_utf8_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -56,7 +56,7 @@ async def test_validate_db_schema_fix_utf8_issue( @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, table: str, db_engine: str, @@ -100,7 +100,7 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( hass: HomeAssistant, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, recorder_dialect_name: None, db_engine: str, diff --git a/tests/components/recorder/auto_repairs/test_schema.py b/tests/components/recorder/auto_repairs/test_schema.py index 857c0f6572f..bf2a925df17 100644 --- a/tests/components/recorder/auto_repairs/test_schema.py +++ 
b/tests/components/recorder/auto_repairs/test_schema.py @@ -18,12 +18,12 @@ from homeassistant.core import HomeAssistant from ..common import async_wait_recording_done -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_entity_registry.py b/tests/components/recorder/test_entity_registry.py index ad438dcc525..8a5ce23799c 100644 --- a/tests/components/recorder/test_entity_registry.py +++ b/tests/components/recorder/test_entity_registry.py @@ -23,7 +23,7 @@ from .common import ( ) from tests.common import MockEntity, MockEntityPlatform -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager def _count_entity_id_in_states_meta( @@ -40,7 +40,7 @@ def _count_entity_id_in_states_meta( @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index 28b8275247c..d9dbbf191f6 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -33,12 +33,12 @@ from .common import ( async_wait_recording_done, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 666626ff688..bfe5c852ca6 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -28,12 +28,12 @@ from .common import ( old_db_schema, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_42.py b/tests/components/recorder/test_history_db_schema_42.py index 85badeea281..23ac6f9fb8a 100644 --- a/tests/components/recorder/test_history_db_schema_42.py +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -31,12 +31,12 @@ from .common import ( ) from .db_schema_42 import StateAttributes, States, StatesMeta -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 24070e6f156..f8d1ac4af57 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -98,12 +98,12 @@ from tests.common import ( async_test_home_assistant, mock_platform, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing 
import RecorderInstanceContextManager, RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -1373,7 +1373,7 @@ async def test_statistics_runs_initiated( @pytest.mark.parametrize("enable_missing_statistics", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( - async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory + async_test_recorder: RecorderInstanceContextManager, freezer: FrozenDateTimeFactory ) -> None: """Test missing statistics are compiled on startup.""" now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0) @@ -1632,7 +1632,7 @@ async def test_service_disable_states_not_recording( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_service_disable_run_information_recorded( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test that runs are still recorded when recorder is disabled.""" diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 052e9202715..e60a4705ac8 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -37,12 +37,12 @@ from .common import async_wait_recording_done, create_engine_test from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager, RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 0624955b0e9..94b7518edb7 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -52,7 +52,7 @@ from .common import ( from .conftest import instrument_migration from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" @@ -60,7 +60,7 @@ SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -124,7 +124,7 @@ def db_schema_32(): @pytest.mark.parametrize("indices_to_drop", [[], [("events", "ix_events_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_events_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" @@ -396,7 +396,7 @@ async def test_migrate_events_context_ids( 
@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_finish_migrate_events_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test we re migrate old uuid context ids and ulid context ids to binary format. @@ -505,7 +505,7 @@ async def test_finish_migrate_events_context_ids( @pytest.mark.parametrize("indices_to_drop", [[], [("states", "ix_states_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_states_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" @@ -758,7 +758,7 @@ async def test_migrate_states_context_ids( @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_finish_migrate_states_context_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test we re migrate old uuid context ids and ulid context ids to binary format. @@ -866,7 +866,7 @@ async def test_finish_migrate_states_context_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_event_type_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test we can migrate event_types to the EventTypes table.""" importlib.import_module(SCHEMA_MODULE_32) @@ -984,7 +984,7 @@ async def test_migrate_event_type_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_entity_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" importlib.import_module(SCHEMA_MODULE_32) @@ -1092,7 +1092,7 @@ async def test_migrate_entity_ids( ) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_post_migrate_entity_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" @@ -1200,7 +1200,7 @@ async def test_post_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_entity_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" importlib.import_module(SCHEMA_MODULE_32) @@ -1310,7 +1310,7 @@ async def test_migrate_null_entity_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_null_event_type_ids( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: 
RecorderInstanceContextManager, ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" importlib.import_module(SCHEMA_MODULE_32) @@ -1991,7 +1991,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_stats_migrate_times( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, ) -> None: """Test we can migrate times in the statistics tables.""" @@ -2147,7 +2147,7 @@ async def test_stats_migrate_times( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_cleanup_unmigrated_state_timestamps( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Ensure schema 48 migration cleans up any unmigrated state timestamps.""" importlib.import_module(SCHEMA_MODULE_32) diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 677abd6083c..43a1b028348 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -25,7 +25,7 @@ from homeassistant.core import HomeAssistant from .common import async_recorder_block_till_done, async_wait_recording_done from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" @@ -34,7 +34,7 @@ SCHEMA_MODULE_CURRENT = "homeassistant.components.recorder.db_schema" @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" @@ -175,7 +175,7 @@ def _create_engine_test( ], ) async def test_data_migrator_logic( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, initial_version: int, expected_migrator_calls: dict[str, tuple[int, int]], expected_created_indices: list[str], @@ -274,7 +274,7 @@ async def test_data_migrator_logic( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migration_changes_prevent_trying_to_migrate_again( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Test that we do not try to migrate when migration_changes indicate its already migrated. 
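For reference on the long run of recorder hunks above and below, which only retype the `async_test_recorder` fixture: a minimal sketch of what the old and new aliases in `tests.typing` plausibly look like, assuming the rename reflects the fixture now producing an async context manager rather than a plain coroutine (the exact definitions are an assumption and are not part of this diff).

from collections.abc import Callable, Coroutine
from contextlib import AbstractAsyncContextManager
from typing import Any

from homeassistant.components.recorder import Recorder

# Assumed shape of the old alias: a factory coroutine that resolves to a
# running recorder instance.
type RecorderInstanceGenerator = Callable[..., Coroutine[Any, Any, Recorder]]

# Assumed shape of the new alias: a factory returning an async context
# manager, matching the `async with async_test_recorder(hass):` call pattern
# these tests rely on.
type RecorderInstanceContextManager = Callable[
    ..., AbstractAsyncContextManager[Recorder]
]
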
diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index c3ff5027b70..e5eea0cf89f 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -45,7 +45,7 @@ from .common import ( convert_pending_states_to_meta, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager TEST_EVENT_TYPES = ( "EVENT_TEST_AUTOPURGE", @@ -59,7 +59,7 @@ TEST_EVENT_TYPES = ( @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index d68d1550268..45bef68dabd 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -47,12 +47,12 @@ from .db_schema_32 import ( StatisticsShortTerm, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 6b1e1a655db..2baf7f2bcbc 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -54,12 +54,12 @@ from .common import ( ) from tests.common import MockPlatform, mock_platform -from tests.typing import RecorderInstanceGenerator, WebSocketGenerator +from tests.typing import RecorderInstanceContextManager, WebSocketGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index 1f9be0cabee..dafa4da81ee 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -27,7 +27,7 @@ from .common import ( ) from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @@ -37,7 +37,8 @@ SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) async def test_delete_duplicates( - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture + async_test_recorder: RecorderInstanceContextManager, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics. @@ -224,7 +225,8 @@ async def test_delete_duplicates( @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) async def test_delete_duplicates_many( - async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture + async_test_recorder: RecorderInstanceContextManager, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics. 
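The `mock_recorder_before_hass` override repeated in the hunks above deserves a note: its body is deliberately empty, and only the dependency on `async_test_recorder` matters. A sketch of the pattern with that intent spelled out; the claim that the shared `hass` fixture requests `mock_recorder_before_hass` is an assumption about the test conftest, not something shown in this diff.

import pytest

from tests.typing import RecorderInstanceContextManager


@pytest.fixture
async def mock_recorder_before_hass(
    async_test_recorder: RecorderInstanceContextManager,
) -> None:
    """Set up recorder.

    The empty body is the point: requesting `async_test_recorder` here is
    what gets recorder setup prepared before the `hass` fixture is built.
    """
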
@@ -418,7 +420,7 @@ async def test_delete_duplicates_many( @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) async def test_delete_duplicates_non_identical( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, tmp_path: Path, ) -> None: @@ -613,7 +615,7 @@ async def test_delete_duplicates_non_identical( @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") async def test_delete_duplicates_short_term( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, tmp_path: Path, ) -> None: diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 4e6d664ec0a..c9020762d4b 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -55,12 +55,12 @@ from .common import ( ) from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager, RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 21f7037c370..58be23bdc85 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -23,7 +23,7 @@ from .common import async_wait_recording_done from .conftest import instrument_migration from tests.common import async_test_home_assistant -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" @@ -73,7 +73,7 @@ def _create_engine_test( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_times( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, ) -> None: """Test we can migrate times in the events and states tables. 
@@ -240,7 +240,7 @@ async def test_migrate_times( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: @@ -351,7 +351,7 @@ async def test_migrate_can_resume_entity_id_post_migration( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_ix_states_event_id_removed( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: @@ -490,7 +490,7 @@ async def test_migrate_can_resume_ix_states_event_id_removed( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_out_of_disk_space_while_rebuild_states_table( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: @@ -670,7 +670,7 @@ async def test_out_of_disk_space_while_rebuild_states_table( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_out_of_disk_space_while_removing_foreign_key( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 403384aee9f..94ed8da1b92 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -41,7 +41,11 @@ from .common import ( from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import RecorderInstanceGenerator, WebSocketGenerator +from tests.typing import ( + RecorderInstanceContextManager, + RecorderInstanceGenerator, + WebSocketGenerator, +) @pytest.fixture @@ -2623,7 +2627,7 @@ async def test_recorder_info_no_instance( async def test_recorder_info_migration_queue_exhausted( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, instrument_migration: InstrumentedMigration, ) -> None: """Test getting recorder status when recorder queue is exhausted.""" diff --git a/tests/components/rest/test_sensor.py b/tests/components/rest/test_sensor.py index 2e02063b215..d5fc5eca55c 100644 --- a/tests/components/rest/test_sensor.py +++ b/tests/components/rest/test_sensor.py @@ -591,7 +591,7 @@ async def test_update_with_no_template(hass: HomeAssistant) -> None: assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1 state = hass.states.get("sensor.foo") - assert state.state == '{"key": "some_json_value"}' + assert state.state == '{"key":"some_json_value"}' @respx.mock diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index d011926848d..fcf5a711c46 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -57,7 +57,7 @@ from 
tests.components.recorder.common import ( ) from tests.typing import ( MockHAClientWebSocket, - RecorderInstanceGenerator, + RecorderInstanceContextManager, WebSocketGenerator, ) @@ -102,7 +102,7 @@ KW_SENSOR_ATTRIBUTES = { @pytest.fixture async def mock_recorder_before_hass( - async_test_recorder: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceContextManager, ) -> None: """Set up recorder patches.""" diff --git a/tests/components/sensor/test_recorder_missing_stats.py b/tests/components/sensor/test_recorder_missing_stats.py index 43e18b89e72..449ffd55727 100644 --- a/tests/components/sensor/test_recorder_missing_stats.py +++ b/tests/components/sensor/test_recorder_missing_stats.py @@ -24,7 +24,7 @@ from tests.components.recorder.common import ( async_wait_recording_done, do_adhoc_statistics, ) -from tests.typing import RecorderInstanceGenerator +from tests.typing import RecorderInstanceContextManager POWER_SENSOR_ATTRIBUTES = { "device_class": "energy", @@ -47,7 +47,7 @@ def disable_db_issue_creation(): @pytest.mark.parametrize("enable_missing_statistics", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( - async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory + async_test_recorder: RecorderInstanceContextManager, freezer: FrozenDateTimeFactory ) -> None: """Test compile missing statistics.""" three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) diff --git a/tests/components/seventeentrack/test_repairs.py b/tests/components/seventeentrack/test_repairs.py deleted file mode 100644 index 44d1f078432..00000000000 --- a/tests/components/seventeentrack/test_repairs.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Tests for the seventeentrack repair flow.""" - -from unittest.mock import AsyncMock - -from freezegun.api import FrozenDateTimeFactory - -from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.seventeentrack import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from . 
import goto_future, init_integration -from .conftest import DEFAULT_SUMMARY_LENGTH, get_package - -from tests.common import MockConfigEntry -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow -from tests.typing import ClientSessionGenerator - - -async def test_repair( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - issue_registry: ir.IssueRegistry, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Ensure everything starts correctly.""" - await init_integration(hass, mock_config_entry) # 2 - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH - assert len(issue_registry.issues) == 1 - - package = get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - await goto_future(hass, freezer) - - assert hass.states.get("sensor.17track_package_friendly_name_1") - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - assert "deprecated" not in mock_config_entry.data - - repair_issue = issue_registry.async_get_issue( - domain=DOMAIN, issue_id=f"deprecate_sensor_{mock_config_entry.entry_id}" - ) - - assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) - - client = await hass_client() - - data = await start_repair_fix_flow(client, DOMAIN, repair_issue.issue_id) - - flow_id = data["flow_id"] - assert data == { - "type": "form", - "flow_id": flow_id, - "handler": DOMAIN, - "step_id": "confirm", - "data_schema": [], - "errors": None, - "description_placeholders": None, - "last_step": None, - "preview": None, - } - - data = await process_repair_fix_flow(client, flow_id) - - flow_id = data["flow_id"] - assert data == { - "type": "create_entry", - "handler": DOMAIN, - "flow_id": flow_id, - "description": None, - "description_placeholders": None, - } - - assert mock_config_entry.data["deprecated"] - - repair_issue = issue_registry.async_get_issue( - domain=DOMAIN, issue_id="deprecate_sensor" - ) - - assert repair_issue is None - - await goto_future(hass, freezer) - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index a631996b4eb..5367fabba9e 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory from pyseventeentrack.errors import SeventeenTrackError @@ -63,87 +63,6 @@ async def test_login_exception( assert not hass.states.async_entity_ids("sensor") -async def test_add_package( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure package is added correctly when user add a new package.""" - package = get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.17track_package_friendly_name_1") - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - package2 = get_package( - tracking_number="789", - friendly_name="friendly name 2", - info_text="info text 2", - location="location 2", - timestamp="2020-08-10 14:25", - ) - mock_seventeentrack.return_value.profile.packages.return_value = [package, 
package2] - - await goto_future(hass, freezer) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 - - -async def test_add_package_default_friendly_name( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure package is added correctly with default friendly name when user add a new package without his own friendly name.""" - package = get_package(friendly_name=None) - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await init_integration(hass, mock_config_entry) - state_456 = hass.states.get("sensor.17track_package_456") - assert state_456 is not None - assert state_456.attributes["friendly_name"] == "17Track Package 456" - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - -async def test_remove_package( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure entity is not there anymore if package is not there.""" - package1 = get_package() - package2 = get_package( - tracking_number="789", - friendly_name="friendly name 2", - info_text="info text 2", - location="location 2", - timestamp="2020-08-10 14:25", - ) - - mock_seventeentrack.return_value.profile.packages.return_value = [ - package1, - package2, - ] - - await init_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is not None - assert hass.states.get("sensor.17track_package_friendly_name_2") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 - - mock_seventeentrack.return_value.profile.packages.return_value = [package2] - - await goto_future(hass, freezer) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is None - assert hass.states.get("sensor.17track_package_friendly_name_2") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - async def test_package_error( hass: HomeAssistant, mock_seventeentrack: AsyncMock, @@ -159,72 +78,6 @@ async def test_package_error( assert hass.states.get("sensor.17track_package_friendly_name_1") is None -async def test_delivered_not_shown( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry_with_default_options: MockConfigEntry, -) -> None: - """Ensure delivered packages are not shown.""" - package = get_package(status=40) - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - with patch( - "homeassistant.components.seventeentrack.sensor.persistent_notification" - ) as persistent_notification_mock: - await init_integration(hass, mock_config_entry_with_default_options) - await goto_future(hass, freezer) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is None - persistent_notification_mock.create.assert_called() - - -async def test_delivered_shown( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure delivered packages are show when user choose to show them.""" - package = get_package(status=40) - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - with patch( - "homeassistant.components.seventeentrack.sensor.persistent_notification" - ) as persistent_notification_mock: - await init_integration(hass, mock_config_entry) - - assert 
hass.states.get("sensor.17track_package_friendly_name_1") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - persistent_notification_mock.create.assert_not_called() - - -async def test_becomes_delivered_not_shown_notification( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry_with_default_options: MockConfigEntry, -) -> None: - """Ensure notification is triggered when package becomes delivered.""" - package = get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await init_integration(hass, mock_config_entry_with_default_options) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - - package_delivered = get_package(status=40) - mock_seventeentrack.return_value.profile.packages.return_value = [package_delivered] - - with patch( - "homeassistant.components.seventeentrack.sensor.persistent_notification" - ) as persistent_notification_mock: - await goto_future(hass, freezer) - - persistent_notification_mock.create.assert_called() - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH - - async def test_summary_correctly_updated( hass: HomeAssistant, freezer: FrozenDateTimeFactory, @@ -237,7 +90,7 @@ async def test_summary_correctly_updated( await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH state_ready_picked = hass.states.get("sensor.17track_ready_to_be_picked_up") assert state_ready_picked is not None @@ -278,25 +131,6 @@ async def test_summary_error( ) -async def test_utc_timestamp( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure package timestamp is converted correctly from HA-defined time zone to UTC.""" - - package = get_package(tz="Asia/Jakarta") - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await init_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.17track_package_friendly_name_1") is not None - assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - state_456 = hass.states.get("sensor.17track_package_friendly_name_1") - assert state_456 is not None - assert str(state_456.attributes.get("timestamp")) == "2020-08-10 03:32:00+00:00" - - async def test_non_valid_platform_config( hass: HomeAssistant, mock_seventeentrack: AsyncMock ) -> None: diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index cd4cdf877a5..9ea66c1acb7 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components.shelly.const import ( DOMAIN, GEN1_RELEASE_URL, + GEN2_BETA_RELEASE_URL, GEN2_RELEASE_URL, ) from homeassistant.components.update import ( @@ -572,7 +573,6 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - assert state.attributes[ATTR_RELEASE_URL] is None monkeypatch.setitem( mock_rpc_device.status["sys"], @@ -589,7 +589,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert 
state.attributes[ATTR_IN_PROGRESS] is False - assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None + assert state.attributes[ATTR_RELEASE_URL] == GEN2_BETA_RELEASE_URL await hass.services.async_call( UPDATE_DOMAIN, diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 17bcd6e3d40..b7c3dff10f6 100644 --- a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -17,7 +17,11 @@ from aioshelly.const import ( ) import pytest -from homeassistant.components.shelly.const import GEN1_RELEASE_URL, GEN2_RELEASE_URL +from homeassistant.components.shelly.const import ( + GEN1_RELEASE_URL, + GEN2_BETA_RELEASE_URL, + GEN2_RELEASE_URL, +) from homeassistant.components.shelly.utils import ( get_block_channel_name, get_block_device_sleep_period, @@ -300,7 +304,7 @@ async def test_get_rpc_input_triggers( (1, MODEL_1, True, None), (2, MODEL_WALL_DISPLAY, False, None), (2, MODEL_PLUS_2PM_V2, False, GEN2_RELEASE_URL), - (2, MODEL_PLUS_2PM_V2, True, None), + (2, MODEL_PLUS_2PM_V2, True, GEN2_BETA_RELEASE_URL), ], ) def test_get_release_url( diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index 146f8e268a4..c4aea195aa7 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -3,6 +3,7 @@ from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock +from pysmlight import Info from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError import pytest @@ -97,7 +98,7 @@ async def test_zeroconf_flow( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_zeroconf_flow_auth( @@ -151,12 +152,99 @@ async def test_zeroconf_flow_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 3 + + +async def test_zeroconf_unsupported_abort( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test we abort zeroconf flow if device unsupported.""" + mock_smlight_client.get_info.return_value = Info(model="SLZB-X") + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unsupported_device" + + +async def test_user_unsupported_abort( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test we abort user flow if unsupported device.""" + mock_smlight_client.get_info.return_value = Info(model="SLZB-X") + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unsupported_device" + + +async def 
test_user_unsupported_abort_auth( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test we abort user flow if unsupported device (with auth).""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + mock_smlight_client.get_info.side_effect = SmlightAuthError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + mock_smlight_client.get_info.side_effect = None + mock_smlight_client.get_info.return_value = Info(model="SLZB-X") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unsupported_device" @pytest.mark.usefixtures("mock_smlight_client") async def test_user_device_exists_abort( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, ) -> None: """Test we abort user flow if device already configured.""" mock_config_entry.add_to_hass(hass) @@ -239,7 +327,7 @@ async def test_user_invalid_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 4 async def test_user_cannot_connect( @@ -276,7 +364,7 @@ async def test_user_cannot_connect( assert result2["title"] == "SLZB-06p7" assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 3 async def test_auth_cannot_connect( @@ -378,7 +466,7 @@ async def test_zeroconf_legacy_mac( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 2 + assert len(mock_smlight_client.get_info.mock_calls) == 3 async def test_reauth_flow( diff --git a/tests/components/spotify/test_media_player.py b/tests/components/spotify/test_media_player.py index 55e0ea8f1d8..456af43d411 100644 --- a/tests/components/spotify/test_media_player.py +++ b/tests/components/spotify/test_media_player.py @@ -641,3 +641,147 @@ async def test_no_album_images( state = hass.states.get("media_player.spotify_spotify_1") assert state assert ATTR_ENTITY_PICTURE not in state.attributes + + +@pytest.mark.usefixtures("setup_credentials") +async def test_normal_polling_interval( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player polling interval.""" + await setup_integration(hass, mock_config_entry) + + assert mock_spotify.return_value.get_playback.return_value.is_playing is True + assert ( + mock_spotify.return_value.get_playback.return_value.progress_ms + - mock_spotify.return_value.get_playback.return_value.item.duration_ms + < 30000 + ) + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_smart_polling_interval( + hass: HomeAssistant, + mock_spotify: MagicMock, 
+ mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player polling interval.""" + freezer.move_to("2023-10-21") + mock_spotify.return_value.get_playback.return_value.progress_ms = 10000 + mock_spotify.return_value.get_playback.return_value.item.duration_ms = 30000 + + await setup_integration(hass, mock_config_entry) + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + freezer.tick(timedelta(seconds=20)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_not_called() + + mock_spotify.return_value.get_playback.return_value.progress_ms = 10000 + mock_spotify.return_value.get_playback.return_value.item.duration_ms = 50000 + + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + freezer.tick(timedelta(seconds=21)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_not_called() + + freezer.tick(timedelta(seconds=9)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_smart_polling_interval_handles_errors( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player polling interval.""" + mock_spotify.return_value.get_playback.return_value.progress_ms = 10000 + mock_spotify.return_value.get_playback.return_value.item.duration_ms = 30000 + + await setup_integration(hass, mock_config_entry) + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + mock_spotify.return_value.get_playback.side_effect = SpotifyConnectionError + + freezer.tick(timedelta(seconds=21)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + freezer.tick(timedelta(seconds=21)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_not_called() + + freezer.tick(timedelta(seconds=9)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_smart_polling_interval_handles_paused( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player polling interval.""" + mock_spotify.return_value.get_playback.return_value.progress_ms = 10000 + mock_spotify.return_value.get_playback.return_value.item.duration_ms = 30000 + mock_spotify.return_value.get_playback.return_value.is_playing = False + + await setup_integration(hass, mock_config_entry) + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() + + freezer.tick(timedelta(seconds=21)) + async_fire_time_changed(hass) + await 
hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_not_called() + + freezer.tick(timedelta(seconds=9)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playback.assert_called_once() + mock_spotify.return_value.get_playback.reset_mock() diff --git a/tests/components/synology_dsm/conftest.py b/tests/components/synology_dsm/conftest.py index 0e8f79ffd40..331c879332d 100644 --- a/tests/components/synology_dsm/conftest.py +++ b/tests/components/synology_dsm/conftest.py @@ -34,5 +34,5 @@ def fixture_dsm(): dsm.network.update = AsyncMock(return_value=True) dsm.surveillance_station.update = AsyncMock(return_value=True) dsm.upgrade.update = AsyncMock(return_value=True) - + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) return dsm diff --git a/tests/components/synology_dsm/snapshots/test_config_flow.ambr b/tests/components/synology_dsm/snapshots/test_config_flow.ambr index 807ec764e52..384f6b885d7 100644 --- a/tests/components/synology_dsm/snapshots/test_config_flow.ambr +++ b/tests/components/synology_dsm/snapshots/test_config_flow.ambr @@ -84,3 +84,17 @@ 'verify_ssl': False, }) # --- +# name: test_user_with_filestation + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 1234, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- diff --git a/tests/components/synology_dsm/test_backup.py b/tests/components/synology_dsm/test_backup.py new file mode 100644 index 00000000000..0cd119cf015 --- /dev/null +++ b/tests/components/synology_dsm/test_backup.py @@ -0,0 +1,709 @@ +"""Tests for the Synology DSM backup agent.""" + +from io import StringIO +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +import pytest +from synology_dsm.api.file_station.models import SynoFileFile, SynoFileSharedFolder +from synology_dsm.exceptions import SynologyDSMAPIErrorException + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.synology_dsm.const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, + DOMAIN, +) +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util.aiohttp import MockStreamReader + +from .consts import HOST, MACS, PASSWORD, PORT, SERIAL, USE_SSL, USERNAME + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +class MockStreamReaderChunked(MockStreamReader): + """Mock a stream reader with simulated chunked data.""" + + async def readchunk(self) -> tuple[bytes, bool]: + """Read bytes.""" + return (self._content.read(), False) + + +async def _mock_download_file(path: str, filename: str) -> MockStreamReader: + if filename == "abcd12ef_meta.json": + return MockStreamReader( + b'{"addons":[],"backup_id":"abcd12ef","date":"2025-01-09T20:14:35.457323+01:00",' + b'"database_included":true,"extra_metadata":{"instance_id":"36b3b7e984da43fc89f7bafb2645fa36",' + b'"with_automatic_settings":true},"folders":[],"homeassistant_included":true,' + b'"homeassistant_version":"2025.2.0.dev0","name":"Automatic backup 2025.2.0.dev0","protected":true,"size":13916160}' + ) + if filename == "abcd12ef.tar": + return 
MockStreamReaderChunked(b"backup data") + raise MockStreamReaderChunked(b"") + + +async def _mock_download_file_meta_ok_tar_missing( + path: str, filename: str +) -> MockStreamReader: + if filename == "abcd12ef_meta.json": + return MockStreamReader( + b'{"addons":[],"backup_id":"abcd12ef","date":"2025-01-09T20:14:35.457323+01:00",' + b'"database_included":true,"extra_metadata":{"instance_id":"36b3b7e984da43fc89f7bafb2645fa36",' + b'"with_automatic_settings":true},"folders":[],"homeassistant_included":true,' + b'"homeassistant_version":"2025.2.0.dev0","name":"Automatic backup 2025.2.0.dev0","protected":true,"size":13916160}' + ) + if filename == "abcd12ef.tar": + raise SynologyDSMAPIErrorException("api", "404", "not found") + raise MockStreamReaderChunked(b"") + + +async def _mock_download_file_meta_defect(path: str, filename: str) -> MockStreamReader: + if filename == "abcd12ef_meta.json": + return MockStreamReader(b"im not a json") + if filename == "abcd12ef.tar": + return MockStreamReaderChunked(b"backup data") + raise MockStreamReaderChunked(b"") + + +@pytest.fixture +def mock_dsm_with_filestation(): + """Mock a successful service with filestation support.""" + + with patch("homeassistant.components.synology_dsm.common.SynologyDSM") as dsm: + dsm.login = AsyncMock(return_value=True) + dsm.update = AsyncMock(return_value=True) + + dsm.surveillance_station.update = AsyncMock(return_value=True) + dsm.upgrade.update = AsyncMock(return_value=True) + dsm.utilisation = Mock(cpu_user_load=1, update=AsyncMock(return_value=True)) + dsm.network = Mock(update=AsyncMock(return_value=True), macs=MACS) + dsm.storage = Mock( + disks_ids=["sda", "sdb", "sdc"], + volumes_ids=["volume_1"], + update=AsyncMock(return_value=True), + ) + dsm.information = Mock(serial=SERIAL) + dsm.file = AsyncMock( + get_shared_folders=AsyncMock( + return_value=[ + SynoFileSharedFolder( + additional=None, + is_dir=True, + name="HA Backup", + path="/ha_backup", + ) + ] + ), + get_files=AsyncMock( + return_value=[ + SynoFileFile( + additional=None, + is_dir=False, + name="abcd12ef_meta.json", + path="/ha_backup/my_backup_path/abcd12ef_meta.json", + ), + SynoFileFile( + additional=None, + is_dir=False, + name="abcd12ef.tar", + path="/ha_backup/my_backup_path/abcd12ef.tar", + ), + ] + ), + download_file=_mock_download_file, + upload_file=AsyncMock(return_value=True), + delete_file=AsyncMock(return_value=True), + ) + dsm.logout = AsyncMock(return_value=True) + yield dsm + + +@pytest.fixture +def mock_dsm_without_filestation(): + """Mock a successful service with filestation support.""" + + with patch("homeassistant.components.synology_dsm.common.SynologyDSM") as dsm: + dsm.login = AsyncMock(return_value=True) + dsm.update = AsyncMock(return_value=True) + + dsm.surveillance_station.update = AsyncMock(return_value=True) + dsm.upgrade.update = AsyncMock(return_value=True) + dsm.utilisation = Mock(cpu_user_load=1, update=AsyncMock(return_value=True)) + dsm.network = Mock(update=AsyncMock(return_value=True), macs=MACS) + dsm.storage = Mock( + disks_ids=["sda", "sdb", "sdc"], + volumes_ids=["volume_1"], + update=AsyncMock(return_value=True), + ) + dsm.information = Mock(serial=SERIAL) + dsm.file = None + + yield dsm + + +@pytest.fixture +async def setup_dsm_with_filestation( + hass: HomeAssistant, + mock_dsm_with_filestation: MagicMock, +): + """Mock setup of synology dsm config entry.""" + with ( + patch( + "homeassistant.components.synology_dsm.common.SynologyDSM", + return_value=mock_dsm_with_filestation, + ), + 
patch("homeassistant.components.synology_dsm.PLATFORMS", return_value=[]), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_SSL: USE_SSL, + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + CONF_MAC: MACS[0], + }, + options={ + CONF_BACKUP_PATH: "my_backup_path", + CONF_BACKUP_SHARE: "/ha_backup", + }, + unique_id="mocked_syno_dsm_entry", + ) + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + await hass.async_block_till_done() + + yield mock_dsm_with_filestation + + +async def test_agents_info( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [ + {"agent_id": "synology_dsm.Mock Title"}, + {"agent_id": "backup.local"}, + ], + } + + +async def test_agents_not_loaded( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent with no loaded config entry.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [ + {"agent_id": "backup.local"}, + ], + } + + +async def test_agents_on_unload( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent on un-loading config entry.""" + # config entry is loaded + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [ + {"agent_id": "synology_dsm.Mock Title"}, + {"agent_id": "backup.local"}, + ], + } + + # unload config entry + entries = hass.config_entries.async_loaded_entries(DOMAIN) + await hass.config_entries.async_unload(entries[0].entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [ + {"agent_id": "backup.local"}, + ], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backups"] == [ + { + "addons": [], + "backup_id": "abcd12ef", + "date": "2025-01-09T20:14:35.457323+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.2.0.dev0", + "name": "Automatic backup 
2025.2.0.dev0", + "protected": True, + "size": 13916160, + "agent_ids": ["synology_dsm.Mock Title"], + "failed_agent_ids": [], + "with_automatic_settings": None, + } + ] + + +async def test_agents_list_backups_error( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent error while list backups.""" + client = await hass_ws_client(hass) + + setup_dsm_with_filestation.file.get_files.side_effect = ( + SynologyDSMAPIErrorException("api", "500", "error") + ) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"synology_dsm.Mock Title": "Failed to list backups"}, + "backups": [], + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, + "next_automatic_backup": None, + "next_automatic_backup_additional": False, + } + + +async def test_agents_list_backups_disabled_filestation( + hass: HomeAssistant, + mock_dsm_without_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent error while list backups when file station is disabled.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert not response["success"] + + +@pytest.mark.parametrize( + ("backup_id", "expected_result"), + [ + ( + "abcd12ef", + { + "addons": [], + "backup_id": "abcd12ef", + "date": "2025-01-09T20:14:35.457323+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.2.0.dev0", + "name": "Automatic backup 2025.2.0.dev0", + "protected": True, + "size": 13916160, + "agent_ids": ["synology_dsm.Mock Title"], + "failed_agent_ids": [], + "with_automatic_settings": None, + }, + ), + ( + "12345", + None, + ), + ], + ids=["found", "not_found"], +) +async def test_agents_get_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, + backup_id: str, + expected_result: dict[str, Any] | None, +) -> None: + """Test agent get backup.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backup"] == expected_result + + +async def test_agents_get_backup_not_existing( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent get not existing backup.""" + client = await hass_ws_client(hass) + backup_id = "ef34ab12" + + setup_dsm_with_filestation.file.download_file = AsyncMock( + side_effect=SynologyDSMAPIErrorException("api", "404", "not found") + ) + + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}, "backup": None} + + +async def test_agents_get_backup_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent error while get backup.""" + client = await hass_ws_client(hass) + backup_id = "ef34ab12" + + setup_dsm_with_filestation.file.get_files.side_effect = ( + SynologyDSMAPIErrorException("api", "500", "error") + ) + + await 
client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"synology_dsm.Mock Title": "Failed to list backups"}, + "backup": None, + } + + +async def test_agents_get_backup_defect_meta( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent error while get backup.""" + client = await hass_ws_client(hass) + backup_id = "ef34ab12" + + setup_dsm_with_filestation.file.download_file = _mock_download_file_meta_defect + + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}, "backup": None} + + +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent download backup.""" + client = await hass_client() + backup_id = "abcd12ef" + + resp = await client.get( + f"/api/backup/download/{backup_id}?agent_id=synology_dsm.Mock Title" + ) + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +async def test_agents_download_not_existing( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent download not existing backup.""" + client = await hass_client() + backup_id = "abcd12ef" + + setup_dsm_with_filestation.file.download_file = ( + _mock_download_file_meta_ok_tar_missing + ) + + resp = await client.get( + f"/api/backup/download/{backup_id}?agent_id=synology_dsm.Mock Title" + ) + assert resp.reason == "Internal Server Error" + assert resp.status == 500 + + +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0, + ) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=synology_dsm.Mock Title", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + mock: AsyncMock = setup_dsm_with_filestation.file.upload_file + assert len(mock.mock_calls) == 2 + assert mock.call_args_list[0].kwargs["filename"] == "test-backup.tar" + assert mock.call_args_list[0].kwargs["path"] == "/ha_backup/my_backup_path" + assert mock.call_args_list[1].kwargs["filename"] == "test-backup_meta.json" + assert mock.call_args_list[1].kwargs["path"] == "/ha_backup/my_backup_path" + + +async def test_agents_upload_error( + hass: HomeAssistant, + hass_client: 
ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent error while uploading backup.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0, + ) + + # fail to upload the tar file + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + setup_dsm_with_filestation.file.upload_file.side_effect = ( + SynologyDSMAPIErrorException("api", "500", "error") + ) + resp = await client.post( + "/api/backup/upload?agent_id=synology_dsm.Mock Title", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + assert "Failed to upload backup" in caplog.text + mock: AsyncMock = setup_dsm_with_filestation.file.upload_file + assert len(mock.mock_calls) == 1 + assert mock.call_args_list[0].kwargs["filename"] == "test-backup.tar" + assert mock.call_args_list[0].kwargs["path"] == "/ha_backup/my_backup_path" + + # fail to upload the meta json file + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + setup_dsm_with_filestation.file.upload_file.side_effect = [ + True, + SynologyDSMAPIErrorException("api", "500", "error"), + ] + + resp = await client.post( + "/api/backup/upload?agent_id=synology_dsm.Mock Title", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + assert "Failed to upload backup" in caplog.text + mock: AsyncMock = setup_dsm_with_filestation.file.upload_file + assert len(mock.mock_calls) == 3 + assert mock.call_args_list[1].kwargs["filename"] == "test-backup.tar" + assert mock.call_args_list[1].kwargs["path"] == "/ha_backup/my_backup_path" + assert mock.call_args_list[2].kwargs["filename"] == "test-backup_meta.json" + assert mock.call_args_list[2].kwargs["path"] == "/ha_backup/my_backup_path" + + +async def test_agents_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abcd12ef" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + mock: AsyncMock = setup_dsm_with_filestation.file.delete_file + assert len(mock.mock_calls) == 2 + assert mock.call_args_list[0].kwargs["filename"] == "abcd12ef.tar" + assert mock.call_args_list[0].kwargs["path"] == "/ha_backup/my_backup_path" + assert 
mock.call_args_list[1].kwargs["filename"] == "abcd12ef_meta.json" + assert mock.call_args_list[1].kwargs["path"] == "/ha_backup/my_backup_path" + + +async def test_agents_delete_not_existing( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test delete not existing backup.""" + client = await hass_ws_client(hass) + backup_id = "ef34ab12" + + setup_dsm_with_filestation.file.delete_file = AsyncMock( + side_effect=SynologyDSMAPIErrorException("api", "404", "not found") + ) + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"synology_dsm.Mock Title": "Failed to delete the backup"} + } + + +async def test_agents_delete_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_dsm_with_filestation: MagicMock, +) -> None: + """Test error while delete backup.""" + client = await hass_ws_client(hass) + + # error while delete + backup_id = "abcd12ef" + setup_dsm_with_filestation.file.delete_file.side_effect = ( + SynologyDSMAPIErrorException("api", "404", "not found") + ) + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"synology_dsm.Mock Title": "Failed to delete the backup"} + } + mock: AsyncMock = setup_dsm_with_filestation.file.delete_file + assert len(mock.mock_calls) == 1 + assert mock.call_args_list[0].kwargs["filename"] == "abcd12ef.tar" + assert mock.call_args_list[0].kwargs["path"] == "/ha_backup/my_backup_path" diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 3ef47292a9b..b63ce6c2e18 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -4,6 +4,7 @@ from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from synology_dsm.api.file_station.models import SynoFileSharedFolder from synology_dsm.exceptions import ( SynologyDSMException, SynologyDSMLogin2SAFailedException, @@ -15,9 +16,9 @@ from syrupy import SnapshotAssertion from homeassistant.components.synology_dsm.config_flow import CONF_OTP_CODE from homeassistant.components.synology_dsm.const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, CONF_SNAPSHOT_QUALITY, - DEFAULT_SCAN_INTERVAL, - DEFAULT_SNAPSHOT_QUALITY, DOMAIN, ) from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF @@ -73,7 +74,7 @@ def mock_controller_service(): update=AsyncMock(return_value=True), ) dsm.information = Mock(serial=SERIAL) - + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) yield dsm @@ -96,6 +97,7 @@ def mock_controller_service_2sa(): update=AsyncMock(return_value=True), ) dsm.information = Mock(serial=SERIAL) + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) yield dsm @@ -116,6 +118,39 @@ def mock_controller_service_vdsm(): update=AsyncMock(return_value=True), ) dsm.information = Mock(serial=SERIAL) + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) + yield dsm + + +@pytest.fixture(name="service_with_filestation") +def mock_controller_service_with_filestation(): + """Mock a successful service with filestation support.""" + with 
patch("homeassistant.components.synology_dsm.config_flow.SynologyDSM") as dsm: + dsm.login = AsyncMock(return_value=True) + dsm.update = AsyncMock(return_value=True) + + dsm.surveillance_station.update = AsyncMock(return_value=True) + dsm.upgrade.update = AsyncMock(return_value=True) + dsm.utilisation = Mock(cpu_user_load=1, update=AsyncMock(return_value=True)) + dsm.network = Mock(update=AsyncMock(return_value=True), macs=MACS) + dsm.storage = Mock( + disks_ids=["sda", "sdb", "sdc"], + volumes_ids=["volume_1"], + update=AsyncMock(return_value=True), + ) + dsm.information = Mock(serial=SERIAL) + dsm.file = AsyncMock( + get_shared_folders=AsyncMock( + return_value=[ + SynoFileSharedFolder( + additional=None, + is_dir=True, + name="HA Backup", + path="/ha_backup", + ) + ] + ) + ) yield dsm @@ -137,7 +172,7 @@ def mock_controller_service_failed(): update=AsyncMock(return_value=True), ) dsm.information = Mock(serial=None) - + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) yield dsm @@ -283,6 +318,55 @@ async def test_user_vdsm( assert result["data"] == snapshot +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_with_filestation( + hass: HomeAssistant, + service_with_filestation: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test user config.""" + with patch( + "homeassistant.components.synology_dsm.config_flow.SynologyDSM", + return_value=service_with_filestation, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=None + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.synology_dsm.config_flow.SynologyDSM", + return_value=service_with_filestation, + ): + # test with all provided + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_SSL: USE_SSL, + CONF_VERIFY_SSL: VERIFY_SSL, + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "backup_share" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_BACKUP_SHARE: "/ha_backup", CONF_BACKUP_PATH: "automatic_ha_backups"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == HOST + assert result["data"] == snapshot + + @pytest.mark.usefixtures("mock_setup_entry") async def test_reauth(hass: HomeAssistant, service: MagicMock) -> None: """Test reauthentication.""" @@ -560,46 +644,54 @@ async def test_existing_ssdp(hass: HomeAssistant, service: MagicMock) -> None: assert result["reason"] == "already_configured" -@pytest.mark.usefixtures("mock_setup_entry") -async def test_options_flow(hass: HomeAssistant, service: MagicMock) -> None: +async def test_options_flow( + hass: HomeAssistant, service_with_filestation: MagicMock +) -> None: """Test config flow options.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: HOST, - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - CONF_MAC: MACS, - }, - unique_id=SERIAL, - ) - config_entry.add_to_hass(hass) + with ( + patch( + "homeassistant.components.synology_dsm.common.SynologyDSM", + return_value=service_with_filestation, + ), + patch("homeassistant.components.synology_dsm.PLATFORMS", return_value=[]), + ): + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: HOST, + CONF_PORT: 
PORT, + CONF_SSL: USE_SSL, + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + CONF_MAC: MACS[0], + }, + unique_id=SERIAL, + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() - assert config_entry.options == {} + assert config_entry.options == {CONF_BACKUP_SHARE: None, CONF_BACKUP_PATH: None} result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - # Scan interval - # Default - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options[CONF_SCAN_INTERVAL] == DEFAULT_SCAN_INTERVAL - assert config_entry.options[CONF_SNAPSHOT_QUALITY] == DEFAULT_SNAPSHOT_QUALITY - - # Manual result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={CONF_SCAN_INTERVAL: 2, CONF_SNAPSHOT_QUALITY: 0}, + user_input={ + CONF_SCAN_INTERVAL: 2, + CONF_SNAPSHOT_QUALITY: 0, + CONF_BACKUP_PATH: "my_backup_path", + CONF_BACKUP_SHARE: "/ha_backup", + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options[CONF_SCAN_INTERVAL] == 2 assert config_entry.options[CONF_SNAPSHOT_QUALITY] == 0 + assert config_entry.options[CONF_BACKUP_PATH] == "my_backup_path" + assert config_entry.options[CONF_BACKUP_SHARE] == "/ha_backup" @pytest.mark.usefixtures("mock_setup_entry") diff --git a/tests/components/synology_dsm/test_init.py b/tests/components/synology_dsm/test_init.py index 13d568e6137..7eaafc98437 100644 --- a/tests/components/synology_dsm/test_init.py +++ b/tests/components/synology_dsm/test_init.py @@ -4,7 +4,13 @@ from unittest.mock import MagicMock, patch from synology_dsm.exceptions import SynologyDSMLoginInvalidException -from homeassistant.components.synology_dsm.const import DOMAIN, SERVICES +from homeassistant.components.synology_dsm.const import ( + CONF_BACKUP_PATH, + CONF_BACKUP_SHARE, + DEFAULT_VERIFY_SSL, + DOMAIN, + SERVICES, +) from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -12,6 +18,7 @@ from homeassistant.const import ( CONF_PORT, CONF_SSL, CONF_USERNAME, + CONF_VERIFY_SSL, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -78,3 +85,38 @@ async def test_reauth_triggered(hass: HomeAssistant) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() mock_async_step_reauth.assert_called_once() + + +async def test_config_entry_migrations( + hass: HomeAssistant, mock_dsm: MagicMock +) -> None: + """Test config entry migrations.""" + with ( + patch( + "homeassistant.components.synology_dsm.common.SynologyDSM", + return_value=mock_dsm, + ), + patch("homeassistant.components.synology_dsm.PLATFORMS", return_value=[]), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_SSL: USE_SSL, + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + CONF_MAC: MACS[0], + }, + ) + entry.add_to_hass(hass) + + assert CONF_VERIFY_SSL not in entry.data + assert CONF_BACKUP_SHARE not in entry.options + assert CONF_BACKUP_PATH not in entry.options + + assert await hass.config_entries.async_setup(entry.entry_id) + + assert entry.data[CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL + assert
entry.options[CONF_BACKUP_SHARE] is None + assert entry.options[CONF_BACKUP_PATH] is None diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index 0c7ab6bc1cc..baa91822ca0 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -62,6 +62,7 @@ def dsm_with_photos() -> MagicMock: dsm.photos.get_item_thumbnail_url = AsyncMock( return_value="http://my.thumbnail.url" ) + dsm.file = AsyncMock(get_shared_folders=AsyncMock(return_value=None)) return dsm diff --git a/tests/components/tado/snapshots/test_climate.ambr b/tests/components/tado/snapshots/test_climate.ambr new file mode 100644 index 00000000000..6ba35b6f6f2 --- /dev/null +++ b/tests/components/tado/snapshots/test_climate.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_aircon_set_hvac_mode[cool-COOL] + _Call( + tuple( + 3, + 'NEXT_TIME_BLOCK', + 24.76, + None, + 'AIR_CONDITIONING', + 'ON', + 'COOL', + 'AUTO', + None, + None, + None, + None, + ), + dict({ + }), + ) +# --- +# name: test_aircon_set_hvac_mode[dry-DRY] + _Call( + tuple( + 3, + 'NEXT_TIME_BLOCK', + 24.76, + None, + 'AIR_CONDITIONING', + 'ON', + 'DRY', + None, + None, + None, + None, + None, + ), + dict({ + }), + ) +# --- +# name: test_aircon_set_hvac_mode[fan_only-FAN] + _Call( + tuple( + 3, + 'NEXT_TIME_BLOCK', + None, + None, + 'AIR_CONDITIONING', + 'ON', + 'FAN', + None, + None, + None, + None, + None, + ), + dict({ + }), + ) +# --- +# name: test_aircon_set_hvac_mode[heat-HEAT] + _Call( + tuple( + 3, + 'NEXT_TIME_BLOCK', + 24.76, + None, + 'AIR_CONDITIONING', + 'ON', + 'HEAT', + 'AUTO', + None, + None, + None, + None, + ), + dict({ + }), + ) +# --- +# name: test_aircon_set_hvac_mode[off-OFF] + _Call( + tuple( + 3, + 'MANUAL', + None, + None, + 'AIR_CONDITIONING', + 'OFF', + ), + dict({ + }), + ) +# --- +# name: test_heater_set_temperature + _Call( + tuple( + 1, + 'NEXT_TIME_BLOCK', + 22.0, + None, + 'HEATING', + 'ON', + 'HEAT', + None, + None, + None, + None, + None, + ), + dict({ + }), + ) +# --- diff --git a/tests/components/tado/test_climate.py b/tests/components/tado/test_climate.py index 5a43c728b6e..0699551c9c0 100644 --- a/tests/components/tado/test_climate.py +++ b/tests/components/tado/test_climate.py @@ -1,5 +1,19 @@ """The sensor tests for the tado platform.""" +from unittest.mock import patch + +from PyTado.interface.api.my_tado import TadoZone +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE from homeassistant.core import HomeAssistant from .util import async_init_integration @@ -121,3 +135,104 @@ async def test_smartac_with_fanlevel_vertical_and_horizontal_swing( # Only test for a subset of attributes in case # HA changes the implementation and a new one appears assert all(item in state.attributes.items() for item in expected_attributes.items()) + + +async def test_heater_set_temperature( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the set temperature of the heater.""" + + await async_init_integration(hass) + + with ( + patch( + "homeassistant.components.tado.PyTado.interface.api.Tado.set_zone_overlay" + ) as mock_set_state, + patch( + "homeassistant.components.tado.PyTado.interface.api.Tado.get_zone_state", + return_value={"setting": 
{"temperature": {"celsius": 22.0}}}, + ), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.baseboard_heater", ATTR_TEMPERATURE: 22.0}, + blocking=True, + ) + + mock_set_state.assert_called_once() + snapshot.assert_match(mock_set_state.call_args) + + +@pytest.mark.parametrize( + ("hvac_mode", "set_hvac_mode"), + [ + (HVACMode.HEAT, "HEAT"), + (HVACMode.DRY, "DRY"), + (HVACMode.FAN_ONLY, "FAN"), + (HVACMode.COOL, "COOL"), + (HVACMode.OFF, "OFF"), + ], +) +async def test_aircon_set_hvac_mode( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + hvac_mode: HVACMode, + set_hvac_mode: str, +) -> None: + """Test the set hvac mode of the air conditioning.""" + + await async_init_integration(hass) + + with ( + patch( + "homeassistant.components.tado.__init__.PyTado.interface.api.Tado.set_zone_overlay" + ) as mock_set_state, + patch( + "homeassistant.components.tado.__init__.PyTado.interface.api.Tado.get_zone_state", + return_value=TadoZone( + zone_id=1, + current_temp=18.7, + connection=None, + current_temp_timestamp="2025-01-02T12:51:52.802Z", + current_humidity=45.1, + current_humidity_timestamp="2025-01-02T12:51:52.802Z", + is_away=False, + current_hvac_action="IDLE", + current_fan_speed=None, + current_fan_level=None, + current_hvac_mode=set_hvac_mode, + current_swing_mode="OFF", + current_vertical_swing_mode="OFF", + current_horizontal_swing_mode="OFF", + target_temp=16.0, + available=True, + power="ON", + link="ONLINE", + ac_power_timestamp=None, + heating_power_timestamp="2025-01-02T13:01:11.758Z", + ac_power=None, + heating_power=None, + heating_power_percentage=0.0, + tado_mode="HOME", + overlay_termination_type="MANUAL", + overlay_termination_timestamp=None, + default_overlay_termination_type="MANUAL", + default_overlay_termination_duration=None, + preparation=False, + open_window=False, + open_window_detected=False, + open_window_attr={}, + precision=0.1, + ), + ), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.air_conditioning", ATTR_HVAC_MODE: hvac_mode}, + blocking=True, + ) + + mock_set_state.assert_called_once() + snapshot.assert_match(mock_set_state.call_args) diff --git a/tests/components/tado/test_helper.py b/tests/components/tado/test_helper.py index bdd7977f858..da959c2124a 100644 --- a/tests/components/tado/test_helper.py +++ b/tests/components/tado/test_helper.py @@ -1,45 +1,94 @@ """Helper method tests.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch -from homeassistant.components.tado import TadoConnector +from PyTado.interface import Tado +import pytest + +from homeassistant.components.tado import TadoDataUpdateCoordinator from homeassistant.components.tado.const import ( CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_MODE, CONST_OVERLAY_TIMER, + DOMAIN, ) from homeassistant.components.tado.helper import decide_duration, decide_overlay_mode +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry -def dummy_tado_connector(hass: HomeAssistant, fallback) -> TadoConnector: + +@pytest.fixture +def entry(request: pytest.FixtureRequest) -> MockConfigEntry: + """Fixture for ConfigEntry with optional fallback.""" + fallback = ( + request.param if hasattr(request, "param") else CONST_OVERLAY_TADO_DEFAULT + ) + return MockConfigEntry( + version=1, + 
minor_version=1, + domain=DOMAIN, + title="Tado", + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + options={ + "fallback": fallback, + }, + ) + + +@pytest.fixture +def tado() -> Tado: + """Fixture for Tado instance.""" + with patch( + "homeassistant.components.tado.PyTado.interface.api.Tado.set_zone_overlay" + ) as mock_set_zone_overlay: + instance = MagicMock(spec=Tado) + instance.set_zone_overlay = mock_set_zone_overlay + yield instance + + +def dummy_tado_connector( + hass: HomeAssistant, entry: ConfigEntry, tado: Tado +) -> TadoDataUpdateCoordinator: """Return dummy tado connector.""" - return TadoConnector(hass, username="dummy", password="dummy", fallback=fallback) + return TadoDataUpdateCoordinator(hass, entry, tado) -async def test_overlay_mode_duration_set(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("entry", [CONST_OVERLAY_TADO_MODE], indirect=True) +async def test_overlay_mode_duration_set( + hass: HomeAssistant, entry: ConfigEntry, tado: Tado +) -> None: """Test overlay method selection when duration is set.""" - tado = dummy_tado_connector(hass=hass, fallback=CONST_OVERLAY_TADO_MODE) - overlay_mode = decide_overlay_mode(tado=tado, duration=3600, zone_id=1) + tado = dummy_tado_connector(hass=hass, entry=entry, tado=tado) + overlay_mode = decide_overlay_mode(coordinator=tado, duration=3600, zone_id=1) # Must select TIMER overlay assert overlay_mode == CONST_OVERLAY_TIMER -async def test_overlay_mode_next_time_block_fallback(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("entry", [CONST_OVERLAY_TADO_MODE], indirect=True) +async def test_overlay_mode_next_time_block_fallback( + hass: HomeAssistant, entry: ConfigEntry, tado: Tado +) -> None: """Test overlay method selection when duration is not set.""" - integration_fallback = CONST_OVERLAY_TADO_MODE - tado = dummy_tado_connector(hass=hass, fallback=integration_fallback) - overlay_mode = decide_overlay_mode(tado=tado, duration=None, zone_id=1) + tado = dummy_tado_connector(hass=hass, entry=entry, tado=tado) + overlay_mode = decide_overlay_mode(coordinator=tado, duration=None, zone_id=1) # Must fallback to integration wide setting - assert overlay_mode == integration_fallback + assert overlay_mode == CONST_OVERLAY_TADO_MODE -async def test_overlay_mode_tado_default_fallback(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("entry", [CONST_OVERLAY_TADO_DEFAULT], indirect=True) +async def test_overlay_mode_tado_default_fallback( + hass: HomeAssistant, entry: ConfigEntry, tado: Tado +) -> None: """Test overlay method selection when tado default is selected.""" - integration_fallback = CONST_OVERLAY_TADO_DEFAULT zone_fallback = CONST_OVERLAY_MANUAL - tado = dummy_tado_connector(hass=hass, fallback=integration_fallback) + tado = dummy_tado_connector(hass=hass, entry=entry, tado=tado) class MockZoneData: def __init__(self) -> None: @@ -49,28 +98,40 @@ async def test_overlay_mode_tado_default_fallback(hass: HomeAssistant) -> None: zone_data = {"zone": {zone_id: MockZoneData()}} with patch.dict(tado.data, zone_data): - overlay_mode = decide_overlay_mode(tado=tado, duration=None, zone_id=zone_id) + overlay_mode = decide_overlay_mode( + coordinator=tado, duration=None, zone_id=zone_id + ) # Must fallback to zone setting assert overlay_mode == zone_fallback -async def test_duration_enabled_without_tado_default(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("entry", [CONST_OVERLAY_MANUAL], indirect=True) +async def test_duration_enabled_without_tado_default( + hass: 
HomeAssistant, entry: ConfigEntry, tado: Tado +) -> None: """Test duration decide method when overlay is timer and duration is set.""" overlay = CONST_OVERLAY_TIMER expected_duration = 600 - tado = dummy_tado_connector(hass=hass, fallback=CONST_OVERLAY_MANUAL) + tado = dummy_tado_connector(hass=hass, entry=entry, tado=tado) duration = decide_duration( - tado=tado, duration=expected_duration, overlay_mode=overlay, zone_id=0 + coordinator=tado, duration=expected_duration, overlay_mode=overlay, zone_id=0 ) # Should return the same duration value assert duration == expected_duration -async def test_duration_enabled_with_tado_default(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("entry", [CONST_OVERLAY_TIMER], indirect=True) +async def test_duration_enabled_with_tado_default( + hass: HomeAssistant, entry: ConfigEntry, tado: Tado +) -> None: """Test overlay method selection when ended up with timer overlay and None duration.""" zone_fallback = CONST_OVERLAY_TIMER expected_duration = 45000 - tado = dummy_tado_connector(hass=hass, fallback=zone_fallback) + tado = dummy_tado_connector( + hass=hass, + entry=entry, + tado=tado, + ) class MockZoneData: def __init__(self) -> None: @@ -81,7 +142,7 @@ async def test_duration_enabled_with_tado_default(hass: HomeAssistant) -> None: zone_data = {"zone": {zone_id: MockZoneData()}} with patch.dict(tado.data, zone_data): duration = decide_duration( - tado=tado, duration=None, zone_id=zone_id, overlay_mode=zone_fallback + coordinator=tado, duration=None, zone_id=zone_id, overlay_mode=zone_fallback ) # Must fallback to zone timer setting assert duration == expected_duration diff --git a/tests/components/tado/test_service.py b/tests/components/tado/test_service.py index f1d12d235cc..336bef55ea1 100644 --- a/tests/components/tado/test_service.py +++ b/tests/components/tado/test_service.py @@ -80,7 +80,7 @@ async def test_add_meter_readings_exception( blocking=True, ) - assert "Could not set meter reading" in str(exc) + assert "Error setting Tado meter reading: Error" in str(exc.value) async def test_add_meter_readings_invalid( diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index a76858ab98e..5bf87dbed33 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -188,3 +188,8 @@ async def async_init_integration( if not skip_setup: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() + + # For a first refresh + await entry.runtime_data.coordinator.async_refresh() + await entry.runtime_data.mobile_coordinator.async_refresh() + await hass.async_block_till_done() diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 23e36eacdd5..a056555f4c0 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -6,8 +6,16 @@ from datetime import datetime from typing import Any from unittest.mock import AsyncMock, MagicMock, patch -from kasa import BaseProtocol, Device, DeviceType, Feature, KasaException, Module -from kasa.interfaces import Fan, Light, LightEffect, LightState +from kasa import ( + BaseProtocol, + Device, + DeviceType, + Feature, + KasaException, + Module, + ThermostatState, +) +from kasa.interfaces import Fan, Light, LightEffect, LightState, Thermostat from kasa.smart.modules.alarm import Alarm from kasa.smartcam.modules.camera import LOCAL_STREAMING_PORT, Camera from syrupy import SnapshotAssertion @@ -197,10 +205,12 @@ def _mocked_device( mod.get_feature.side_effect = device_features.get 
mod.has_feature.side_effect = lambda id: id in device_features + device.parent = None device.children = [] if children: for child in children: child.mac = mac + child.parent = device device.children = children device.device_type = device_type if device_type else DeviceType.Unknown if ( @@ -359,6 +369,18 @@ def _mocked_camera_module(device): return camera +def _mocked_thermostat_module(device): + therm = MagicMock(auto_spec=Thermostat, name="Mocked thermostat") + therm.state = True + therm.temperature = 20.2 + therm.target_temperature = 22.2 + therm.mode = ThermostatState.Heating + therm.set_state = AsyncMock() + therm.set_target_temperature = AsyncMock() + + return therm + + def _mocked_strip_children(features=None, alias=None) -> list[Device]: plug0 = _mocked_device( alias="Plug0" if alias is None else alias, @@ -427,6 +449,7 @@ MODULE_TO_MOCK_GEN = { Module.Fan: _mocked_fan_module, Module.Alarm: _mocked_alarm_module, Module.Camera: _mocked_camera_module, + Module.Thermostat: _mocked_thermostat_module, } diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index b1c8abd3a9b..adcca24886b 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -2,7 +2,7 @@ from datetime import timedelta -from kasa import Device, Feature +from kasa import Device, Feature, Module from kasa.smart.modules.temperaturecontrol import ThermostatState import pytest from syrupy.assertion import SnapshotAssertion @@ -45,31 +45,24 @@ async def mocked_hub(hass: HomeAssistant) -> Device: features = [ _mocked_feature( - "temperature", value=20.2, category=Feature.Category.Primary, unit="celsius" - ), - _mocked_feature( - "target_temperature", - value=22.2, + "temperature", type_=Feature.Type.Number, category=Feature.Category.Primary, unit="celsius", ), _mocked_feature( - "state", - value=True, - type_=Feature.Type.Switch, - category=Feature.Category.Primary, - ), - _mocked_feature( - "thermostat_mode", - value=ThermostatState.Heating, - type_=Feature.Type.Choice, + "target_temperature", + type_=Feature.Type.Number, category=Feature.Category.Primary, + unit="celsius", ), ] thermostat = _mocked_device( - alias="thermostat", features=features, device_type=Device.Type.Thermostat + alias="thermostat", + features=features, + modules=[Module.Thermostat], + device_type=Device.Type.Thermostat, ) return _mocked_device( @@ -121,7 +114,9 @@ async def test_set_temperature( ) -> None: """Test that set_temperature service calls the setter.""" mocked_thermostat = mocked_hub.children[0] - mocked_thermostat.features["target_temperature"].minimum_value = 0 + + therm_module = mocked_thermostat.modules.get(Module.Thermostat) + assert therm_module await setup_platform_for_device( hass, mock_config_entry, Platform.CLIMATE, mocked_hub @@ -133,8 +128,8 @@ async def test_set_temperature( {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 10}, blocking=True, ) - target_temp_feature = mocked_thermostat.features["target_temperature"] - target_temp_feature.set_value.assert_called_with(10) + + therm_module.set_target_temperature.assert_called_with(10) async def test_set_hvac_mode( @@ -146,8 +141,8 @@ async def test_set_hvac_mode( ) mocked_thermostat = mocked_hub.children[0] - mocked_state = mocked_thermostat.features["state"] - assert mocked_state is not None + therm_module = mocked_thermostat.modules.get(Module.Thermostat) + assert therm_module await hass.services.async_call( CLIMATE_DOMAIN, @@ -156,7 +151,7 @@ async def test_set_hvac_mode( blocking=True, ) - 
mocked_state.set_value.assert_called_with(False) + therm_module.set_state.assert_called_with(False) await hass.services.async_call( CLIMATE_DOMAIN, @@ -164,7 +159,7 @@ async def test_set_hvac_mode( {ATTR_ENTITY_ID: [ENTITY_ID], ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, ) - mocked_state.set_value.assert_called_with(True) + therm_module.set_state.assert_called_with(True) msg = "Tried to set unsupported mode: dry" with pytest.raises(ServiceValidationError, match=msg): @@ -185,7 +180,8 @@ async def test_turn_on_and_off( ) mocked_thermostat = mocked_hub.children[0] - mocked_state = mocked_thermostat.features["state"] + therm_module = mocked_thermostat.modules.get(Module.Thermostat) + assert therm_module await hass.services.async_call( CLIMATE_DOMAIN, @@ -194,7 +190,7 @@ async def test_turn_on_and_off( blocking=True, ) - mocked_state.set_value.assert_called_with(False) + therm_module.set_state.assert_called_with(False) await hass.services.async_call( CLIMATE_DOMAIN, @@ -203,7 +199,7 @@ async def test_turn_on_and_off( blocking=True, ) - mocked_state.set_value.assert_called_with(True) + therm_module.set_state.assert_called_with(True) async def test_unknown_mode( @@ -218,11 +214,31 @@ async def test_unknown_mode( ) mocked_thermostat = mocked_hub.children[0] - mocked_state = mocked_thermostat.features["thermostat_mode"] - mocked_state.value = ThermostatState.Unknown + therm_module = mocked_thermostat.modules.get(Module.Thermostat) + assert therm_module + + therm_module.mode = ThermostatState.Unknown async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.OFF assert "Unknown thermostat state, defaulting to OFF" in caplog.text + + +async def test_missing_feature_attributes( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mocked_hub: Device, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that a module missing the min/max and unit feature logs an error.""" + mocked_thermostat = mocked_hub.children[0] + mocked_thermostat.features.pop("target_temperature") + mocked_thermostat.features.pop("temperature") + + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + assert "Unable to get min/max target temperature" in caplog.text + assert "Unable to get correct temperature unit" in caplog.text diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index ef0ae3b6827..ffcadc79faf 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -1007,8 +1007,8 @@ async def test_automatic_feature_device_addition_and_removal( ), pytest.param( "climate", - [], - ["state", "thermostat_mode", "temperature", "target_temperature"], + [Module.Thermostat], + ["temperature", "target_temperature"], None, DeviceType.Thermostat, id="climate", @@ -1052,6 +1052,10 @@ async def test_automatic_module_device_addition_and_removal( ip_address=IP_ADDRESS3, mac=MAC_ADDRESS3, ) + # Set the parent property for the dynamic children as mock_device only sets + # it on initialization + for child in children.values(): + child.parent = mock_device with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): mock_camera_config_entry.add_to_hass(hass) @@ -1150,3 +1154,73 @@ async def test_automatic_module_device_addition_and_removal( ) assert device_entry assert device_entry.via_device_id == parent_device.id + + +async def 
test_automatic_device_addition_does_not_remove_disabled_default( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_connect: AsyncMock, + mock_discovery: AsyncMock, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test for automatic device addition does not remove disabled default entities.""" + + features = ["ssid", "signal_level"] + children = { + f"child{index}": _mocked_device( + alias=f"child {index}", + features=features, + device_id=f"child{index}", + ) + for index in range(1, 5) + } + + mock_device = _mocked_device( + alias="hub", + children=[children["child1"], children["child2"]], + features=features, + device_type=DeviceType.Hub, + device_id="hub_parent", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + # Set the parent property for the dynamic children as mock_device only sets + # it on initialization + for child in children.values(): + child.parent = mock_device + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + mock_camera_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_camera_config_entry.entry_id) + await hass.async_block_till_done() + + def check_entities(entity_id_device): + entity_id = f"sensor.{entity_id_device}_signal_level" + state = hass.states.get(entity_id) + assert state + reg_ent = entity_registry.async_get(entity_id) + assert reg_ent + assert reg_ent.disabled is False + + entity_id = f"sensor.{entity_id_device}_ssid" + state = hass.states.get(entity_id) + assert state is None + reg_ent = entity_registry.async_get(entity_id) + assert reg_ent + assert reg_ent.disabled is True + assert reg_ent.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + check_entities("hub") + for child_id in (1, 2): + check_entities(f"child_{child_id}") + + # Add child devices + mock_device.children = [children["child1"], children["child2"], children["child3"]] + freezer.tick(5) + async_fire_time_changed(hass) + + check_entities("hub") + for child_id in (1, 2, 3): + check_entities(f"child_{child_id}") diff --git a/tests/components/vicare/fixtures/Vitocal250A.json b/tests/components/vicare/fixtures/Vitocal250A.json new file mode 100644 index 00000000000..1da43531a89 --- /dev/null +++ b/tests/components/vicare/fixtures/Vitocal250A.json @@ -0,0 +1,4447 @@ +{ + "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.messages.errors.raw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "entries": { + "type": "array", + "value": [] + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.messages.errors.raw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.productIdentification", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "product": { + "type": "object", + "value": { + "busAddress": 1, + "busType": "CanExternal", + "productFamily": "B_00027_VC250", + "viessmannIdentificationNumber": "################" + } + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.productIdentification" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.productMatrix", + "gatewayId": "################", + 
"isEnabled": true, + "isReady": true, + "properties": { + "product": { + "type": "array", + "value": [ + { + "busAddress": 1, + "busType": "CanExternal", + "productFamily": "B_00027_VC250", + "viessmannIdentificationNumber": "################" + }, + { + "busAddress": 71, + "busType": "CanExternal", + "productFamily": "B_00012_VCH200", + "viessmannIdentificationNumber": "################" + } + ] + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.productMatrix" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "deviceSerialVitocal250A" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.boiler.sensors.temperature.commonSupply", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 44.6 + } + }, + "timestamp": "2024-10-01T16:28:33.694Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.boiler.sensors.temperature.commonSupply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.boiler.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.boiler.serial" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.bufferCylinder.sensors.temperature.main", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.buffer.sensors.temperature.main", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 35.3 + } + }, + "timestamp": "2024-10-01T16:28:33.694Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.buffer.sensors.temperature.main" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.bufferCylinder.sensors.temperature.main", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 35.3 + } + }, + "timestamp": "2024-10-01T16:28:33.694Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.bufferCylinder.sensors.temperature.main" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "enabled": { + "type": "array", + 
"value": ["1"] + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.circulation.pump", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:09:57.180Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.circulation.pump" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.heating.curve", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.heating.curve" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.heating.schedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.heating.schedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.modes.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.modes.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.modes.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.modes.heating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.modes.heating" + }, + { + "apiVersion": 1, + "commands": {}, + 
"deviceId": "0", + "feature": "heating.circuits.0.operating.modes.heatingCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.modes.heatingCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.modes.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.modes.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.comfortCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.comfortCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.comfortCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.comfortCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.comfortEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.comfortEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.comfortHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.comfortHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.eco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.eco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", 
+ "feature": "heating.circuits.0.operating.programs.fixed", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.fixed" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.forcedLastFromSchedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.forcedLastFromSchedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.normalCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.normalCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.normalCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.normalCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.normalEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.normalEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.normalHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.normalHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.reducedCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.reducedCooling" + }, + { + "apiVersion": 
1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.reducedCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.reducedCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.reducedEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.reducedEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.reducedHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.reducedHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.circuits.N.operating.programs.reducedEnergySaving and heating.circuits.0.operating.programs.eco", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.circuits.0.operating.programs.summerEco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.operating.programs.summerEco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.remoteController", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.remoteController" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.sensors.temperature.room", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.sensors.temperature.room" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.temperature", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.temperature" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.temperature.levels", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.temperature.levels" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.0.zone.mode", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.0.zone.mode" + }, + { + "apiVersion": 1, + "commands": { + "setName": { + "isExecutable": true, + "name": "setName", + "params": { + "name": { + "constraints": { + "maxLength": 20, + "minLength": 1 + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1/commands/setName" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "name": { + "type": "string", + "value": "Heizkreis" + }, + "type": { + "type": "string", + "value": "heatingCircuit" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.circulation.pump", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "on" + } + }, + "timestamp": "2024-10-01T16:09:57.180Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.circulation.pump" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.frostprotection", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "off" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.frostprotection" + }, + { + "apiVersion": 1, + "commands": { + "setCurve": { + "isExecutable": true, + "name": "setCurve", + "params": { + "shift": { + "constraints": { + "max": 40, + "min": -13, + "stepping": 1 + }, + "required": true, + "type": "number" + }, + "slope": { + "constraints": { + "max": 3.5, + "min": 0.2, + "stepping": 0.1 + }, + 
"required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.heating.curve/commands/setCurve" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.heating.curve", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "shift": { + "type": "number", + "unit": "", + "value": 0 + }, + "slope": { + "type": "number", + "unit": "", + "value": 1.1 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.heating.curve" + }, + { + "apiVersion": 1, + "commands": { + "resetSchedule": { + "isExecutable": true, + "name": "resetSchedule", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.heating.schedule/commands/resetSchedule" + }, + "setSchedule": { + "isExecutable": true, + "name": "setSchedule", + "params": { + "newSchedule": { + "constraints": { + "defaultMode": "reduced", + "maxEntries": 4, + "modes": ["normal", "comfort"], + "overlapAllowed": false, + "resolution": 10 + }, + "required": true, + "type": "Schedule" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.heating.schedule/commands/setSchedule" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.heating.schedule", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "entries": { + "type": "Schedule", + "value": { + "fri": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "mon": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "sat": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "sun": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "thu": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "tue": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ], + "wed": [ + { + "end": "22:00", + "mode": "comfort", + "position": 0, + "start": "06:00" + } + ] + } + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.heating.schedule" + }, + { + "apiVersion": 1, + "commands": { + "setName": { + "isExecutable": true, + "name": "setName", + "params": { + "name": { + "constraints": { + "maxLength": 20, + "minLength": 1 + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.name/commands/setName" + } + }, + "components": [], + "deviceId": "0", + "feature": "heating.circuits.1.name", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "name": { + "type": "string", + "value": "Heizkreis" + } + }, + "timestamp": "2024-09-20T08:56:49.795Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.name" + }, + { + "apiVersion": 
1, + "commands": { + "setMode": { + "isExecutable": true, + "name": "setMode", + "params": { + "mode": { + "constraints": { + "enum": ["heating", "standby"] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.active/commands/setMode" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.modes.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "heating" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.modes.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.modes.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.modes.heatingCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.heatingCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.modes.standby", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.modes.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "comfortHeating" + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.comfortCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.comfortCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "cooling" + }, + "reason": { + "type": "string", + "value": "eco" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.comfortEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "heating" + }, + "reason": { + "type": "string", + "value": "eco" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortEnergySaving" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": false, + "name": "activate", + "params": { + "temperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortHeating/commands/activate" + }, + "deactivate": { + "isExecutable": false, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortHeating/commands/deactivate" + }, + "setTemperature": { + "isExecutable": true, + "name": "setTemperature", + "params": { + "targetTemperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortHeating/commands/setTemperature" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.comfortHeating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "demand": { + "type": "string", + "value": "heating" + }, + "temperature": { + "type": "number", + "unit": "celsius", + "value": 24 + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.comfortHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.eco", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.eco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.fixed", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.fixed" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.forcedLastFromSchedule/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.forcedLastFromSchedule/commands/deactivate" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.forcedLastFromSchedule", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.forcedLastFromSchedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.frostprotection", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.normalCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.normalCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "cooling" + }, + "reason": { + "type": "string", + "value": "eco" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.normalEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { 
+ "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "heating" + }, + "reason": { + "type": "string", + "value": "eco" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalEnergySaving" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": false, + "name": "activate", + "params": { + "temperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalHeating/commands/activate" + }, + "deactivate": { + "isExecutable": false, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalHeating/commands/deactivate" + }, + "setTemperature": { + "isExecutable": true, + "name": "setTemperature", + "params": { + "targetTemperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalHeating/commands/setTemperature" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.normalHeating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "heating" + }, + "temperature": { + "type": "number", + "unit": "celsius", + "value": 24 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.normalHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.reducedCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.reducedCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "cooling" + }, + "reason": { + "type": "string", + "value": "eco" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.reducedEnergySaving", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": 
"string", + "value": "heating" + }, + "reason": { + "type": "string", + "value": "unknown" + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedEnergySaving" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": false, + "name": "activate", + "params": { + "temperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedHeating/commands/activate" + }, + "deactivate": { + "isExecutable": false, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedHeating/commands/deactivate" + }, + "setTemperature": { + "isExecutable": true, + "name": "setTemperature", + "params": { + "targetTemperature": { + "constraints": { + "max": 37, + "min": 3, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedHeating/commands/setTemperature" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.reducedHeating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "demand": { + "type": "string", + "value": "heating" + }, + "temperature": { + "type": "number", + "unit": "celsius", + "value": 24 + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.reducedHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.standby", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.circuits.N.operating.programs.reducedEnergySaving and heating.circuits.0.operating.programs.eco", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.circuits.1.operating.programs.summerEco", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T03:59:26.407Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.operating.programs.summerEco" + }, + { + "apiVersion": 1, + "commands": { + "removeZigbeeController": { + "isExecutable": false, + "name": "removeZigbeeController", + "params": {}, + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.remoteController/commands/removeZigbeeController" + }, + "setZigbeeController": { + "isExecutable": true, + "name": "setZigbeeController", + "params": { + "deviceId": { + "constraints": { + "enum": [] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.remoteController/commands/setZigbeeController" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.remoteController", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.remoteController" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.sensors.temperature.room", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 24.1 + } + }, + "timestamp": "2024-10-01T16:05:52.313Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.sensors.temperature.room" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 39 + } + }, + "timestamp": "2024-10-01T16:28:40.965Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.temperature", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:26:48.295Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.temperature" + }, + { + "apiVersion": 1, + "commands": { + "setLevels": { + "isExecutable": true, + "name": "setLevels", + "params": { + "maxTemperature": { + "constraints": { + "max": 70, + "min": 10, + "stepping": 1 + }, + "required": true, + "type": "number" + }, + "minTemperature": { + "constraints": { + "max": 30, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.temperature.levels/commands/setLevels" + }, + "setMax": { + "isExecutable": true, + "name": "setMax", + "params": { + "temperature": { + "constraints": { + "max": 70, + "min": 10, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.temperature.levels/commands/setMax" + }, + "setMin": { + "isExecutable": true, + "name": "setMin", + "params": { + "temperature": { + "constraints": { + "max": 
30, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.temperature.levels/commands/setMin" + } + }, + "deviceId": "0", + "feature": "heating.circuits.1.temperature.levels", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "max": { + "type": "number", + "unit": "celsius", + "value": 55 + }, + "min": { + "type": "number", + "unit": "celsius", + "value": 20 + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.temperature.levels" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.1.zone.mode", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.1.zone.mode" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.circulation.pump", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:09:57.180Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.circulation.pump" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.heating.curve", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.heating.curve" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.heating.schedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.heating.schedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.modes.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.modes.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.modes.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.modes.heating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.modes.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.modes.heatingCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.modes.heatingCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.modes.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.modes.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.comfortCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.comfortCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.comfortCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.comfortCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.comfortEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.comfortEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.comfortHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.comfortHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.eco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.eco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.fixed", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.fixed" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.forcedLastFromSchedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.forcedLastFromSchedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.normalCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.normalCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.normalCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.normalCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.normalEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": 
"2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.normalEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.normalHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.normalHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.reducedCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.reducedCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.reducedCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.reducedCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.reducedEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.reducedEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.reducedHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.reducedHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.circuits.N.operating.programs.reducedEnergySaving and heating.circuits.0.operating.programs.eco", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.circuits.2.operating.programs.summerEco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.operating.programs.summerEco" + }, + { + "apiVersion": 1, + 
"commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.remoteController", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.remoteController" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.sensors.temperature.room", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.sensors.temperature.room" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.temperature", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.temperature" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.temperature.levels", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.temperature.levels" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.2.zone.mode", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.2.zone.mode" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.circulation.pump", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:09:57.180Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.circulation.pump" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.heating.curve", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.heating.curve" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.heating.schedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.heating.schedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.modes.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.modes.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.modes.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.modes.heating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.modes.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.modes.heatingCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.modes.heatingCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.modes.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.modes.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.active", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.active" + }, + { + 
"apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.comfortCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.comfortCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.comfortCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.comfortCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.comfortEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.comfortEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.comfortHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.comfortHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.eco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.eco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.fixed", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.fixed" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.forcedLastFromSchedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.forcedLastFromSchedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.frostprotection", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.frostprotection" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.normalCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.normalCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.normalCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.normalCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.normalEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.normalEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.normalHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.normalHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.reducedCooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.reducedCooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.reducedCoolingEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.reducedCoolingEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.reducedEnergySaving", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.reducedEnergySaving" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.reducedHeating", + "gatewayId": "################", + "isEnabled": false, + "isReady": 
true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.reducedHeating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.standby", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.circuits.N.operating.programs.reducedEnergySaving and heating.circuits.0.operating.programs.eco", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.circuits.3.operating.programs.summerEco", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.operating.programs.summerEco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.remoteController", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.remoteController" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.sensors.temperature.room", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.sensors.temperature.room" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.temperature", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.temperature" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.temperature.levels", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.temperature.levels" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.circuits.3.zone.mode", + "gatewayId": "################", + "isEnabled": false, + 
"isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.circuits.3.zone.mode" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "enabled": { + "type": "array", + "value": ["0"] + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "phase": { + "type": "string", + "value": "ready" + } + }, + "timestamp": "2024-10-01T16:12:14.713Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.heat.production.current", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "watt", + "value": 13.317 + } + }, + "timestamp": "2024-10-01T16:28:29.219Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.heat.production.current" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.power.consumption.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.power.consumption.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.power.consumption.current", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "kilowatt", + "value": 3.107 + } + }, + "timestamp": "2024-10-01T16:28:29.219Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.power.consumption.current" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.power.consumption.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [7.6, 5.4, 3, 2.6, 4.3, 1.2, 4.2, 2.7] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T11:46:35.700Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [7.6, 93.9, 41.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T11:46:35.768Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [13, 21.799999999999997, 20.5, 27.4, 16.2] + }, + "weekValueReadAt": { + "type": "string", + "value": 
"2024-10-01T11:46:35.700Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [143, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T11:45:28.937Z" + } + }, + "timestamp": "2024-10-01T12:18:26.686Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.power.consumption.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.power.consumption.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [16.4, 31.2, 0, 0, 0, 0, 0, 0] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T16:25:33.871Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [16.4, 36.7, 2.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T16:25:33.871Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [47.599999999999994, 0, 0, 5.5, 0] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T16:25:33.871Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [55.2, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T16:25:33.871Z" + } + }, + "timestamp": "2024-10-01T16:27:05.568Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.power.consumption.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.power.consumption.total", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [24, 36.6, 3, 2.6, 4.3, 1.2, 4.2, 2.7] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [24, 130.60000000000002, 43.6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [60.599999999999994, 21.799999999999997, 20.5, 32.9, 16.2] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [198.2, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + } + }, + "timestamp": "2024-10-01T16:27:05.568Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.power.consumption.total" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.compressors.0.statistics", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "hours": { + "type": "number", + "unit": "hour", + "value": 71 + }, + "starts": { + "type": "number", + "unit": "", + "value": 121 + } + }, + "timestamp": "2024-10-01T16:12:54.682Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.compressors.0.statistics" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.device.variant", + "gatewayId": "################", + "isEnabled": true, + 
"isReady": true, + "properties": { + "value": { + "type": "string", + "value": "Vitocal250A" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.device.variant" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "status": { + "type": "string", + "value": "on" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": false, + "name": "activate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.hygiene/commands/activate" + }, + "disable": { + "isExecutable": false, + "name": "disable", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.hygiene/commands/disable" + }, + "enable": { + "isExecutable": true, + "name": "enable", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.hygiene/commands/enable" + } + }, + "deviceId": "0", + "feature": "heating.dhw.hygiene", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "enabled": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.hygiene" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.hygiene.trigger", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.hygiene.trigger" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.oneTimeCharge/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.oneTimeCharge/commands/deactivate" + } + }, + "deviceId": "0", + "feature": "heating.dhw.oneTimeCharge", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.oneTimeCharge" + }, + { + "apiVersion": 1, + "commands": { + "setMode": { + "isExecutable": true, + "name": "setMode", + "params": { + "mode": { + "constraints": { + "enum": ["efficientWithMinComfort", "efficient", "off"] + }, + "required": true, + "type": "string" + } + }, + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.active/commands/setMode" + } + }, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "efficientWithMinComfort" + } + }, + "timestamp": "2024-10-01T00:31:26.139Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.balanced", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.balanced" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.comfort", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:26.139Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.comfort" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.eco", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:26.139Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.eco" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.efficient", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.efficient" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.efficientWithMinComfort", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.efficientWithMinComfort" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.operating.modes.off", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.operating.modes.off" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.pumps.circulation", + "gatewayId": "################", + 
"isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.pumps.circulation" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.pumps.circulation.schedule", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.pumps.circulation.schedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.pumps.secondary", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.pumps.secondary" + }, + { + "apiVersion": 1, + "commands": { + "resetSchedule": { + "isExecutable": true, + "name": "resetSchedule", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.schedule/commands/resetSchedule" + }, + "setSchedule": { + "isExecutable": true, + "name": "setSchedule", + "params": { + "newSchedule": { + "constraints": { + "defaultMode": "off", + "maxEntries": 4, + "modes": ["on"], + "overlapAllowed": false, + "resolution": 10 + }, + "required": true, + "type": "Schedule" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.schedule/commands/setSchedule" + } + }, + "deviceId": "0", + "feature": "heating.dhw.schedule", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "entries": { + "type": "Schedule", + "value": { + "fri": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "mon": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "sat": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "sun": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "thu": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "tue": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ], + "wed": [ + { + "end": "22:00", + "mode": "on", + "position": 0, + "start": "08:00" + } + ] + } + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.schedule" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.sensors.temperature.dhwCylinder", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 58.8 + } + }, + "timestamp": "2024-10-01T16:28:40.965Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.dhwCylinder" + }, + { + "apiVersion": 1, + "commands": {}, + 
"deviceId": "0", + "feature": "heating.dhw.sensors.temperature.dhwCylinder.middle", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.dhwCylinder.middle" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.sensors.temperature.dhwCylinder.top", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:28:40.965Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.dhwCylinder.top" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.dhw.sensors.temperature.dhwCylinder", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.dhw.sensors.temperature.hotWaterStorage", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 58.8 + } + }, + "timestamp": "2024-10-01T16:28:40.965Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.hotWaterStorage" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.dhw.sensors.temperature.dhwCylinder.middle", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.dhw.sensors.temperature.hotWaterStorage.middle", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.hotWaterStorage.middle" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.dhw.sensors.temperature.dhwCylinder.top", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.dhw.sensors.temperature.hotWaterStorage.top", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:28:40.965Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.sensors.temperature.hotWaterStorage.top" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.temperature.hygiene", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.hygiene" + }, + { + "apiVersion": 1, + "commands": { + "setHysteresis": { + "isExecutable": true, + "name": "setHysteresis", + "params": { + "hysteresis": { + "constraints": { + "max": 10, + "min": 1, + "stepping": 0.5 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.hysteresis/commands/setHysteresis" + }, + 
"setHysteresisSwitchOffValue": { + "isExecutable": true, + "name": "setHysteresisSwitchOffValue", + "params": { + "hysteresis": { + "constraints": { + "max": 2.5, + "min": 0, + "stepping": 0.5 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.hysteresis/commands/setHysteresisSwitchOffValue" + }, + "setHysteresisSwitchOnValue": { + "isExecutable": true, + "name": "setHysteresisSwitchOnValue", + "params": { + "hysteresis": { + "constraints": { + "max": 10, + "min": 1, + "stepping": 0.5 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.hysteresis/commands/setHysteresisSwitchOnValue" + } + }, + "deviceId": "0", + "feature": "heating.dhw.temperature.hysteresis", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "switchOffValue": { + "type": "number", + "unit": "kelvin", + "value": 0 + }, + "switchOnValue": { + "type": "number", + "unit": "kelvin", + "value": 5 + }, + "value": { + "type": "number", + "unit": "kelvin", + "value": 5 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.hysteresis" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.dhw.temperature.levels", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "default": { + "type": "number", + "unit": "celsius", + "value": 50 + }, + "max": { + "type": "number", + "unit": "celsius", + "value": 10 + }, + "min": { + "type": "number", + "unit": "celsius", + "value": 10 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.levels" + }, + { + "apiVersion": 1, + "commands": { + "setTargetTemperature": { + "isExecutable": true, + "name": "setTargetTemperature", + "params": { + "temperature": { + "constraints": { + "efficientLowerBorder": 0, + "efficientUpperBorder": 55, + "max": 60, + "min": 10, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.main/commands/setTargetTemperature" + } + }, + "deviceId": "0", + "feature": "heating.dhw.temperature.main", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "celsius", + "value": 47 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.dhw.temperature.main" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.heat.production.current", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "watt", + "value": 0 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.heat.production.current" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.current", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "watt", + "value": 0 + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.current" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + } + }, + "timestamp": "2024-10-01T00:31:26.139Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.summary.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "currentDay": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "currentMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "currentYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "lastMonth": { + "type": "number", + "unit": "kilowattHour", + 
"value": 0 + }, + "lastSevenDays": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "lastYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.summary.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.summary.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "currentDay": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "currentMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "currentYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "lastMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "lastSevenDays": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + }, + "lastYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + } + }, + "timestamp": "2024-10-01T00:31:26.139Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.summary.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.power.consumption.total", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0, 0, 0, 0] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [0, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + } + }, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.power.consumption.total" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.statistics", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "hours": { + "type": "number", + "unit": "hour", + "value": 0 + }, + "starts": { + "type": "number", + "unit": "", + "value": 0 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.statistics" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.heatingRod.status", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "level1": { + "type": "boolean", + "value": false + }, + "level2": { + "type": "boolean", + "value": false + }, + "level3": { + "type": "boolean", + "value": false + }, + "overall": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.heatingRod.status" + }, + { + "apiVersion": 1, + "commands": { + "changeEndDate": { + "isExecutable": false, + "name": "changeEndDate", + "params": { + "end": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$", + "sameDayAllowed": true + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holiday/commands/changeEndDate" + }, + "schedule": { + "isExecutable": true, + "name": "schedule", + "params": { + "end": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$", + "sameDayAllowed": true + }, + "required": true, + "type": "string" + }, + "start": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$" + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holiday/commands/schedule" + }, + "unschedule": { + "isExecutable": true, + "name": "unschedule", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holiday/commands/unschedule" + } + }, + "deviceId": "0", + "feature": "heating.operating.programs.holiday", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "end": { + "type": "string", + "value": "2000-01-01" + }, + "start": { + "type": "string", + "value": "2000-01-01" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holiday" + }, + { + "apiVersion": 1, + "commands": { + "changeEndDate": { + "isExecutable": false, + "name": "changeEndDate", + "params": { + "end": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$", + "sameDayAllowed": true + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holidayAtHome/commands/changeEndDate" + }, + "schedule": { + "isExecutable": true, + "name": "schedule", + "params": { + "end": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$", + "sameDayAllowed": true + }, + "required": true, + "type": "string" + }, + "start": { + "constraints": { + "regEx": "^[\\d]{4}-[\\d]{2}-[\\d]{2}$" + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holidayAtHome/commands/schedule" + }, + "unschedule": { + "isExecutable": true, + "name": "unschedule", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holidayAtHome/commands/unschedule" + } + }, + "deviceId": "0", + "feature": "heating.operating.programs.holidayAtHome", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "end": { + "type": "string", + "value": "2000-01-01" + }, + "start": { + "type": "string", + "value": 
"2000-01-01" + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.operating.programs.holidayAtHome" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.cooling", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T00:31:26.264Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.cooling" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.current", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "kilowatt", + "value": 3.107 + } + }, + "timestamp": "2024-10-01T16:28:29.219Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.current" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [7.6, 5.4, 3, 2.6, 4.3, 1.2, 4.2, 2.7] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [7.6, 93.9, 41.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [13, 21.799999999999997, 20.5, 27.4, 16.2] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [143, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + } + }, + "timestamp": "2024-10-01T12:18:26.686Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [16.4, 31.2, 0, 0, 0, 0, 0, 0] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [16.4, 36.7, 2.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [47.599999999999994, 0, 0, 5.5, 0] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [55.2, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.552Z" + } + }, + "timestamp": "2024-10-01T16:27:05.568Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.heating" + }, + { + "apiVersion": 1, 
+ "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.summary.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "currentDay": { + "type": "number", + "unit": "kilowattHour", + "value": 7.6 + }, + "currentMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 7.6 + }, + "currentYear": { + "type": "number", + "unit": "kilowattHour", + "value": 143 + }, + "lastMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 93.9 + }, + "lastSevenDays": { + "type": "number", + "unit": "kilowattHour", + "value": 28.3 + }, + "lastYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + } + }, + "timestamp": "2024-10-01T11:46:54.639Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.summary.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.summary.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "currentDay": { + "type": "number", + "unit": "kilowattHour", + "value": 16.4 + }, + "currentMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 16.4 + }, + "currentYear": { + "type": "number", + "unit": "kilowattHour", + "value": 55.2 + }, + "lastMonth": { + "type": "number", + "unit": "kilowattHour", + "value": 36.7 + }, + "lastSevenDays": { + "type": "number", + "unit": "kilowattHour", + "value": 47.6 + }, + "lastYear": { + "type": "number", + "unit": "kilowattHour", + "value": 0 + } + }, + "timestamp": "2024-10-01T16:27:05.568Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.summary.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.power.consumption.total", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "day": { + "type": "array", + "unit": "kilowattHour", + "value": [24, 36.6, 3, 2.6, 4.3, 1.2, 4.2, 2.7] + }, + "dayValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "month": { + "type": "array", + "unit": "kilowattHour", + "value": [24, 130.60000000000002, 43.6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + }, + "monthValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "week": { + "type": "array", + "unit": "kilowattHour", + "value": [60.599999999999994, 21.799999999999997, 20.5, 32.9, 16.2] + }, + "weekValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + }, + "year": { + "type": "array", + "unit": "kilowattHour", + "value": [198.2, 0] + }, + "yearValueReadAt": { + "type": "string", + "value": "2024-10-01T00:31:23.543Z" + } + }, + "timestamp": "2024-10-01T16:27:05.568Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.power.consumption.total" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.primaryCircuit.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 12.8 + } + }, + "timestamp": "2024-10-01T16:28:36.488Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.primaryCircuit.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.spf.dhw", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.scop.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 4.1 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.scop.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.spf.heating", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.scop.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 3.2 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.scop.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deprecated": { + "info": "replaced by heating.spf.total", + "removalDate": "2024-09-15" + }, + "deviceId": "0", + "feature": "heating.scop.total", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 3.9 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.scop.total" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.secondaryCircuit.sensors.temperature.supply", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 45.1 + } + }, + "timestamp": "2024-10-01T16:28:36.488Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.secondaryCircuit.sensors.temperature.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.sensors.pressure.supply", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "bar", + "value": 2.1 + } + }, + "timestamp": "2024-10-01T15:06:07.125Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.pressure.supply" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.sensors.temperature.allengra", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 35.8 + } + }, + "timestamp": "2024-10-01T16:28:20.497Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.temperature.allengra" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": 
"heating.sensors.temperature.hydraulicSeparator", + "gatewayId": "################", + "isEnabled": false, + "isReady": true, + "properties": {}, + "timestamp": "2024-10-01T16:28:33.694Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.temperature.hydraulicSeparator" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.sensors.temperature.outside", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 14.3 + } + }, + "timestamp": "2024-10-01T16:28:36.488Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.temperature.outside" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.sensors.temperature.return", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "celsius", + "value": 35.3 + } + }, + "timestamp": "2024-10-01T16:28:04.882Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.temperature.return" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.sensors.volumetricFlow.allengra", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "status": { + "type": "string", + "value": "connected" + }, + "value": { + "type": "number", + "unit": "liter/hour", + "value": 1015 + } + }, + "timestamp": "2024-10-01T16:28:36.488Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.sensors.volumetricFlow.allengra" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.spf.dhw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 4.1 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.spf.dhw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.spf.heating", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 3.2 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.spf.heating" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.spf.total", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "number", + "unit": "", + "value": 3.9 + } + }, + "timestamp": "2024-10-01T00:31:21.381Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.spf.total" + } + ] +} diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr index 88c3c945253..17c9ee99320 100644 --- 
a/tests/components/vicare/snapshots/test_sensor.ambr +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_all_heating_entities[sensor.model0_boiler_temperature-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_boiler_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -34,7 +34,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_boiler_temperature-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_boiler_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -50,7 +50,7 @@ 'state': '63', }) # --- -# name: test_all_heating_entities[sensor.model0_burner_hours-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_hours-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -85,7 +85,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_burner_hours-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_hours-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Burner hours', @@ -100,7 +100,7 @@ 'state': '18726.3', }) # --- -# name: test_all_heating_entities[sensor.model0_burner_modulation-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_modulation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -135,7 +135,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_all_heating_entities[sensor.model0_burner_modulation-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_modulation-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Burner modulation', @@ -150,7 +150,7 @@ 'state': '0', }) # --- -# name: test_all_heating_entities[sensor.model0_burner_starts-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_starts-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -185,7 +185,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_burner_starts-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_burner_starts-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Burner starts', @@ -199,7 +199,7 @@ 'state': '14315', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_month-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_month-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -234,7 +234,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_month-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_month-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 DHW gas consumption this month', @@ -248,7 +248,7 @@ 'state': '805', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_week-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_week-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -283,7 +283,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_week-state] +# name: 
test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_week-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 DHW gas consumption this week', @@ -297,7 +297,7 @@ 'state': '84', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_year-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -332,7 +332,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_this_year-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_this_year-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 DHW gas consumption this year', @@ -346,7 +346,7 @@ 'state': '8203', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_today-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -381,7 +381,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_gas_consumption_today-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_gas_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 DHW gas consumption today', @@ -395,7 +395,7 @@ 'state': '22', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_max_temperature-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_max_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -430,7 +430,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_max_temperature-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_max_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -446,7 +446,7 @@ 'state': '60', }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_min_temperature-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_min_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -481,7 +481,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_dhw_min_temperature-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_dhw_min_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -497,7 +497,7 @@ 'state': '10', }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_this_week-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_this_week-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -532,7 +532,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_this_week-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_this_week-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -548,7 +548,7 @@ 'state': '0.829', }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_this_year-entry] +# name: 
test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_this_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -583,7 +583,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_this_year-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_this_year-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -599,7 +599,7 @@ 'state': '207.106', }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_today-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -634,7 +634,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_electricity_consumption_today-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_electricity_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -650,7 +650,7 @@ 'state': '0.219', }) # --- -# name: test_all_heating_entities[sensor.model0_energy-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -685,7 +685,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_energy-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_energy-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -701,7 +701,7 @@ 'state': '7.843', }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_month-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_month-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -736,7 +736,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_month-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_month-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Heating gas consumption this month', @@ -750,7 +750,7 @@ 'state': '0', }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_week-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_week-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -785,7 +785,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_week-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_week-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Heating gas consumption this week', @@ -799,7 +799,7 @@ 'state': '0', }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_year-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -834,7 +834,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_this_year-state] +# name: 
test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_this_year-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Heating gas consumption this year', @@ -848,7 +848,7 @@ 'state': '30946', }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_today-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -883,7 +883,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_heating_entities[sensor.model0_heating_gas_consumption_today-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_heating_gas_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Heating gas consumption today', @@ -897,7 +897,7 @@ 'state': '0', }) # --- -# name: test_all_heating_entities[sensor.model0_outside_temperature-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_outside_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -932,7 +932,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_outside_temperature-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_outside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -948,7 +948,7 @@ 'state': '20.8', }) # --- -# name: test_all_heating_entities[sensor.model0_supply_temperature-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_supply_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -983,7 +983,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_supply_temperature-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_supply_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -999,7 +999,7 @@ 'state': '63', }) # --- -# name: test_all_heating_entities[sensor.model0_supply_temperature_2-entry] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_supply_temperature_2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1034,7 +1034,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_heating_entities[sensor.model0_supply_temperature_2-state] +# name: test_all_entities[type:boiler-vicare/Vitodens300W.json][sensor.model0_supply_temperature_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -1050,7 +1050,1269 @@ 'state': '25.5', }) # --- -# name: test_all_ventilation_entities[sensor.model0_ventilation_level-entry] +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_buffer_main_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_buffer_main_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Buffer main temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'buffer_main_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-buffer main temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_buffer_main_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Buffer main temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_buffer_main_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.3', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_hours-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_compressor_hours', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor hours', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_hours', + 'unique_id': 'gateway0_deviceSerialVitocal250A-compressor_hours-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_hours-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Compressor hours', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_compressor_hours', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '71', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_phase-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_compressor_phase', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor phase', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_phase', + 'unique_id': 'gateway0_deviceSerialVitocal250A-compressor_phase-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_phase-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Compressor phase', + }), + 'context': , + 'entity_id': 'sensor.model0_compressor_phase', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ready', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_starts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_compressor_starts', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor starts', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_starts', + 'unique_id': 'gateway0_deviceSerialVitocal250A-compressor_starts-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_compressor_starts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Compressor starts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_compressor_starts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '121', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_electricity_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW electricity consumption last seven days', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_summary_dhw_consumption_heating_lastsevendays', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_summary_dhw_consumption_heating_lastsevendays', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 DHW electricity consumption last seven days', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_electricity_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.3', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_electricity_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW electricity consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_dhw_summary_consumption_heating_currentmonth', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_dhw_summary_consumption_heating_currentmonth', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_this_month-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 DHW electricity consumption this month', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_electricity_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_electricity_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW electricity consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_dhw_summary_consumption_heating_currentyear', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_dhw_summary_consumption_heating_currentyear', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 DHW electricity consumption this year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_electricity_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '143', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_electricity_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW electricity consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_dhw_summary_consumption_heating_currentday', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_dhw_summary_consumption_heating_currentday', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_electricity_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 DHW electricity consumption today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_electricity_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_max_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW max temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_max_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-hotwater_max_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_max_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW max temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_min_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW min temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_min_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-hotwater_min_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_min_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW min temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_storage_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_storage_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW storage temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dhw_storage_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-dhw_storage_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_dhw_storage_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'temperature', + 'friendly_name': 'model0 DHW storage temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_storage_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58.8', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_electricity_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_electricity_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Electricity consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_today', + 'unique_id': 'gateway0_deviceSerialVitocal250A-power consumption today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_electricity_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Electricity consumption today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_electricity_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_electricity_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating electricity consumption last seven days', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_summary_consumption_heating_lastsevendays', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_summary_consumption_heating_lastsevendays', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Heating electricity consumption last seven days', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_electricity_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '47.6', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_electricity_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating electricity consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_summary_consumption_heating_currentmonth', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_summary_consumption_heating_currentmonth', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_this_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Heating electricity consumption this month', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_electricity_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.4', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_electricity_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating electricity consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_summary_consumption_heating_currentyear', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_summary_consumption_heating_currentyear', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Heating electricity consumption this year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_electricity_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55.2', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_electricity_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating electricity consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'energy_summary_consumption_heating_currentday', + 'unique_id': 'gateway0_deviceSerialVitocal250A-energy_summary_consumption_heating_currentday', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_electricity_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Heating electricity consumption today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_electricity_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.4', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_rod_hours-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_heating_rod_hours', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating rod hours', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_rod_hours', + 'unique_id': 'gateway0_deviceSerialVitocal250A-heating_rod_hours', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_rod_hours-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating rod hours', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_rod_hours', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_rod_starts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_heating_rod_starts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating rod starts', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_rod_starts', + 'unique_id': 'gateway0_deviceSerialVitocal250A-heating_rod_starts', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_heating_rod_starts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating rod starts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_rod_starts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outside_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-outside_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.3', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_primary_circuit_supply_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_primary_circuit_supply_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Primary circuit supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'primary_circuit_supply_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-primary_circuit_supply_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_primary_circuit_supply_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Primary circuit supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_primary_circuit_supply_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.8', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_return_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_return_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'return_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-return_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_return_temperature-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Return temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_return_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.3', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_secondary_circuit_supply_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_secondary_circuit_supply_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Secondary circuit supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'secondary_circuit_supply_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-secondary_circuit_supply_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_secondary_circuit_supply_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Secondary circuit supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_secondary_circuit_supply_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.1', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_supply_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_supply_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply pressure', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_pressure', + 'unique_id': 'gateway0_deviceSerialVitocal250A-supply_pressure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_supply_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'model0 Supply pressure', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.1', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_supply_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_supply_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_temperature', + 'unique_id': 'gateway0_deviceSerialVitocal250A-supply_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_supply_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39', + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_volumetric_flow-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_volumetric_flow', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volumetric flow', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volumetric_flow', + 'unique_id': 'gateway0_deviceSerialVitocal250A-volumetric_flow', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[type:heatpump-vicare/Vitocal250A.json][sensor.model0_volumetric_flow-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Volumetric flow', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_volumetric_flow', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.015', + }) +# --- +# name: test_all_entities[type:ventilation-vicare/ViAir300F.json][sensor.model0_ventilation_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1091,7 +2353,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_ventilation_entities[sensor.model0_ventilation_level-state] +# name: test_all_entities[type:ventilation-vicare/ViAir300F.json][sensor.model0_ventilation_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'enum', @@ -1112,7 +2374,7 @@ 'state': 'levelone', }) # --- -# name: test_all_ventilation_entities[sensor.model0_ventilation_reason-entry] +# name: test_all_entities[type:ventilation-vicare/ViAir300F.json][sensor.model0_ventilation_reason-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1154,7 +2416,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_ventilation_entities[sensor.model0_ventilation_reason-state] +# name: test_all_entities[type:ventilation-vicare/ViAir300F.json][sensor.model0_ventilation_reason-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'enum', diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py index 9b8b69f29db..daad6bfa1c8 100644 --- a/tests/components/vicare/test_sensor.py +++ b/tests/components/vicare/test_sensor.py @@ -16,15 +16,25 @@ from tests.common import MockConfigEntry, snapshot_platform 
 
 
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_all_heating_entities(
+@pytest.mark.parametrize(
+    ("fixture_type", "fixture_data"),
+    [
+        ("type:boiler", "vicare/Vitodens300W.json"),
+        ("type:heatpump", "vicare/Vitocal250A.json"),
+        ("type:ventilation", "vicare/ViAir300F.json"),
+    ],
+)
+async def test_all_entities(
     hass: HomeAssistant,
+    fixture_type: str,
+    fixture_data: str,
     snapshot: SnapshotAssertion,
     mock_config_entry: MockConfigEntry,
     entity_registry: er.EntityRegistry,
 ) -> None:
     """Test all entities."""
     fixtures: list[Fixture] = [
-        Fixture({"type:boiler"}, "vicare/Vitodens300W.json"),
+        Fixture({fixture_type}, fixture_data),
     ]
     with (
         patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)),
@@ -35,24 +45,6 @@ async def test_all_heating_entities(
     await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
 
 
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-async def test_all_ventilation_entities(
-    hass: HomeAssistant,
-    snapshot: SnapshotAssertion,
-    mock_config_entry: MockConfigEntry,
-    entity_registry: er.EntityRegistry,
-) -> None:
-    """Test all entities."""
-    fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")]
-    with (
-        patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)),
-        patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]),
-    ):
-        await setup_integration(hass, mock_config_entry)
-
-    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
-
-
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
 async def test_room_sensors(
     hass: HomeAssistant,
diff --git a/tests/components/webostv/__init__.py b/tests/components/webostv/__init__.py
index 5027b235eb1..d9a0a135023 100644
--- a/tests/components/webostv/__init__.py
+++ b/tests/components/webostv/__init__.py
@@ -1,4 +1,4 @@
-"""Tests for the WebOS TV integration."""
+"""Tests for the LG webOS TV integration."""
 
 from homeassistant.components.webostv.const import DOMAIN
 from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST
diff --git a/tests/components/webostv/conftest.py b/tests/components/webostv/conftest.py
index 1e3f7ecdc67..bf007f5b936 100644
--- a/tests/components/webostv/conftest.py
+++ b/tests/components/webostv/conftest.py
@@ -1,4 +1,4 @@
-"""Common fixtures and objects for the LG webOS integration tests."""
+"""Common fixtures and objects for the LG webOS TV integration tests."""
 
 from collections.abc import Generator
 from unittest.mock import AsyncMock, Mock, patch
@@ -30,9 +30,15 @@ def mock_setup_entry() -> Generator[AsyncMock]:
 @pytest.fixture(name="client")
 def client_fixture():
     """Patch of client library for tests."""
-    with patch(
-        "homeassistant.components.webostv.WebOsClient", autospec=True
-    ) as mock_client_class:
+    with (
+        patch(
+            "homeassistant.components.webostv.WebOsClient", autospec=True
+        ) as mock_client_class,
+        patch(
+            "homeassistant.components.webostv.config_flow.WebOsClient",
+            new=mock_client_class,
+        ),
+    ):
         client = mock_client_class.return_value
         client.hello_info = {"deviceUUID": FAKE_UUID}
         client.software_info = {"major_ver": "major", "minor_ver": "minor"}
diff --git a/tests/components/webostv/const.py b/tests/components/webostv/const.py
index 52453d4ffa9..a63a4fe3289 100644
--- a/tests/components/webostv/const.py
+++ b/tests/components/webostv/const.py
@@ -1,4 +1,4 @@
-"""Constants for LG webOS Smart TV tests."""
+"""Constants for LG webOS TV tests."""
 
 from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
 from homeassistant.components.webostv.const import LIVE_TV_APP_ID
diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py
index 38c78bd087a..34ab39618d8 100644
--- a/tests/components/webostv/test_config_flow.py
+++ b/tests/components/webostv/test_config_flow.py
@@ -1,4 +1,4 @@
-"""Test the WebOS Tv config flow."""
+"""Test the LG webOS TV config flow."""
 
 from aiowebostv import WebOsTvPairError
 import pytest
@@ -103,16 +103,25 @@ async def test_options_flow_live_tv_in_apps(
     assert result["data"][CONF_SOURCES] == ["Live TV", "Input01", "Input02"]
 
 
-async def test_options_flow_cannot_retrieve(hass: HomeAssistant, client) -> None:
-    """Test options config flow cannot retrieve sources."""
+@pytest.mark.parametrize(
+    ("side_effect", "error"),
+    [
+        (WebOsTvPairError, "error_pairing"),
+        (ConnectionResetError, "cannot_connect"),
+    ],
+)
+async def test_options_flow_errors(
+    hass: HomeAssistant, client, side_effect, error
+) -> None:
+    """Test options config flow errors."""
     entry = await setup_webostv(hass)
 
-    client.connect.side_effect = ConnectionRefusedError
+    client.connect.side_effect = side_effect
     result = await hass.config_entries.options.async_init(entry.entry_id)
     await hass.async_block_till_done()
 
     assert result["type"] is FlowResultType.FORM
-    assert result["errors"] == {"base": "cannot_retrieve"}
+    assert result["errors"] == {"base": error}
 
     # recover
     client.connect.side_effect = None
@@ -141,7 +150,7 @@ async def test_form_cannot_connect(hass: HomeAssistant, client) -> None:
         data=MOCK_USER_CONFIG,
     )
 
-    client.connect.side_effect = ConnectionRefusedError
+    client.connect.side_effect = ConnectionResetError
     result = await hass.config_entries.flow.async_configure(
         result["flow_id"], user_input={}
     )
@@ -305,7 +314,7 @@ async def test_reauth_successful(hass: HomeAssistant, client) -> None:
     ("side_effect", "error"),
     [
         (WebOsTvPairError, "error_pairing"),
-        (ConnectionRefusedError, "cannot_connect"),
+        (ConnectionResetError, "cannot_connect"),
     ],
 )
 async def test_reauth_errors(hass: HomeAssistant, client, side_effect, error) -> None:
@@ -360,7 +369,7 @@ async def test_reconfigure_successful(hass: HomeAssistant, client) -> None:
     ("side_effect", "error"),
     [
         (WebOsTvPairError, "error_pairing"),
-        (ConnectionRefusedError, "cannot_connect"),
+        (ConnectionResetError, "cannot_connect"),
     ],
 )
 async def test_reconfigure_errors(
diff --git a/tests/components/webostv/test_device_trigger.py b/tests/components/webostv/test_device_trigger.py
index 284cd8ad108..c14e8f4542a 100644
--- a/tests/components/webostv/test_device_trigger.py
+++ b/tests/components/webostv/test_device_trigger.py
@@ -1,4 +1,4 @@
-"""The tests for WebOS TV device triggers."""
+"""The tests for LG webOS TV device triggers."""
 
 import pytest
 
@@ -111,7 +111,7 @@ async def test_invalid_trigger_raises(
     await setup_webostv(hass)
 
     # Test wrong trigger platform type
-    with pytest.raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError, match="Unhandled trigger type: wrong.type"):
         await device_trigger.async_attach_trigger(
             hass, {"type": "wrong.type", "device_id": "invalid_device_id"}, None, {}
         )
@@ -140,7 +140,6 @@ async def test_invalid_entry_raises(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
     client,
-    caplog: pytest.LogCaptureFixture,
     domain: str,
     entry_state: ConfigEntryState,
 ) -> None:
diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py
index d35dd1fb883..0cf815ce9e2 100644
--- a/tests/components/webostv/test_diagnostics.py
+++ b/tests/components/webostv/test_diagnostics.py
@@ -1,4 +1,4 @@
-"""Tests for the diagnostics data provided by LG webOS Smart TV."""
+"""Tests for the diagnostics data provided by LG webOS TV."""
 
 from syrupy.assertion import SnapshotAssertion
 from syrupy.filters import props
diff --git a/tests/components/webostv/test_media_player.py b/tests/components/webostv/test_media_player.py
index ab3feac1f2d..d5241dbe668 100644
--- a/tests/components/webostv/test_media_player.py
+++ b/tests/components/webostv/test_media_player.py
@@ -1,4 +1,4 @@
-"""The tests for the LG webOS media player platform."""
+"""The tests for the LG webOS TV media player platform."""
 
 from datetime import timedelta
 from http import HTTPStatus
@@ -165,7 +165,7 @@ async def test_media_next_previous_track(
 
 
 async def test_select_source_with_empty_source_list(
-    hass: HomeAssistant, client, caplog: pytest.LogCaptureFixture
+    hass: HomeAssistant, client
 ) -> None:
     """Ensure we don't call client methods when we don't have sources."""
     await setup_webostv(hass)
@@ -175,11 +175,14 @@ async def test_select_source_with_empty_source_list(
         ATTR_ENTITY_ID: ENTITY_ID,
         ATTR_INPUT_SOURCE: "nonexistent",
     }
-    await hass.services.async_call(MP_DOMAIN, SERVICE_SELECT_SOURCE, data, True)
+    with pytest.raises(
+        HomeAssistantError,
+        match=f"Source nonexistent not found in the sources list for {TV_NAME}",
+    ):
+        await hass.services.async_call(MP_DOMAIN, SERVICE_SELECT_SOURCE, data, True)
 
     client.launch_app.assert_not_called()
     client.set_input.assert_not_called()
-    assert f"Source nonexistent not found for {TV_NAME}" in caplog.text
 
 
 async def test_select_app_source(hass: HomeAssistant, client) -> None:
@@ -482,35 +485,44 @@ async def test_client_key_update_on_connect(
     assert config_entry.data[CONF_CLIENT_SECRET] == client.client_key
 
 
+@pytest.mark.parametrize(
+    ("is_on", "exception", "error_message"),
+    [
+        (
+            True,
+            WebOsTvCommandError("Some error"),
+            f"Communication error while calling async_media_play for device {TV_NAME}: Some error",
+        ),
+        (
+            True,
+            WebOsTvCommandError("Some other error"),
+            f"Communication error while calling async_media_play for device {TV_NAME}: Some other error",
+        ),
+        (
+            False,
+            None,
+            f"Error calling async_media_play for device {TV_NAME}: Device is off and cannot be controlled",
+        ),
+    ],
+)
 async def test_control_error_handling(
-    hass: HomeAssistant, client, caplog: pytest.LogCaptureFixture
+    hass: HomeAssistant,
+    client,
+    is_on: bool,
+    exception: Exception,
+    error_message: str,
 ) -> None:
     """Test control errors handling."""
     await setup_webostv(hass)
-    client.play.side_effect = WebOsTvCommandError
-    data = {ATTR_ENTITY_ID: ENTITY_ID}
+    client.play.side_effect = exception
+    client.is_on = is_on
+    await client.mock_state_update()
 
-    # Device on, raise HomeAssistantError
-    with pytest.raises(HomeAssistantError) as exc:
+    data = {ATTR_ENTITY_ID: ENTITY_ID}
+    with pytest.raises(HomeAssistantError, match=error_message):
         await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True)
-    assert (
-        str(exc.value)
-        == f"Error calling async_media_play on entity {ENTITY_ID}, state:on"
-    )
-    assert client.play.call_count == 1
-
-    # Device off, log a warning
-    client.is_on = False
-    client.play.side_effect = TimeoutError
-    await client.mock_state_update()
-    await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data, True)
-
-    assert client.play.call_count == 2
-    assert (
-        f"Error calling async_media_play on entity {ENTITY_ID}, state:off, error:"
-        " TimeoutError()" in caplog.text
-    )
+    assert
client.play.call_count == int(is_on) async def test_supported_features(hass: HomeAssistant, client) -> None: diff --git a/tests/components/webostv/test_notify.py b/tests/components/webostv/test_notify.py index b12cd0c7c6c..fd56f0ea0bb 100644 --- a/tests/components/webostv/test_notify.py +++ b/tests/components/webostv/test_notify.py @@ -1,8 +1,8 @@ -"""The tests for the WebOS TV notify platform.""" +"""The tests for the LG webOS TV notify platform.""" from unittest.mock import call -from aiowebostv import WebOsTvPairError +from aiowebostv import WebOsTvCommandError import pytest from homeassistant.components.notify import ( @@ -13,6 +13,7 @@ from homeassistant.components.notify import ( from homeassistant.components.webostv import DOMAIN from homeassistant.const import ATTR_ICON from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from homeassistant.util import slugify @@ -74,84 +75,54 @@ async def test_notify(hass: HomeAssistant, client) -> None: ) -async def test_notify_not_connected(hass: HomeAssistant, client) -> None: - """Test sending a message when client is not connected.""" - await setup_webostv(hass) - assert hass.services.has_service(NOTIFY_DOMAIN, SERVICE_NAME) - - client.is_connected.return_value = False - await hass.services.async_call( - NOTIFY_DOMAIN, - SERVICE_NAME, - { - ATTR_MESSAGE: MESSAGE, - ATTR_DATA: { - ATTR_ICON: ICON_PATH, - }, - }, - blocking=True, - ) - assert client.mock_calls[0] == call.connect() - assert client.connect.call_count == 2 - client.send_message.assert_called_with(MESSAGE, icon_path=ICON_PATH) - - -async def test_icon_not_found( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client -) -> None: - """Test notify icon not found error.""" - await setup_webostv(hass) - assert hass.services.has_service(NOTIFY_DOMAIN, SERVICE_NAME) - - client.send_message.side_effect = FileNotFoundError - await hass.services.async_call( - NOTIFY_DOMAIN, - SERVICE_NAME, - { - ATTR_MESSAGE: MESSAGE, - ATTR_DATA: { - ATTR_ICON: ICON_PATH, - }, - }, - blocking=True, - ) - assert client.mock_calls[0] == call.connect() - assert client.connect.call_count == 1 - client.send_message.assert_called_with(MESSAGE, icon_path=ICON_PATH) - assert f"Icon {ICON_PATH} not found" in caplog.text - - @pytest.mark.parametrize( - ("side_effect", "error"), + ("is_on", "exception", "error_message"), [ - (WebOsTvPairError, "Pairing with TV failed"), - (ConnectionRefusedError, "TV unreachable"), + ( + True, + WebOsTvCommandError("Some error"), + f"Communication error while sending notification to device {TV_NAME}: Some error", + ), + ( + True, + FileNotFoundError("Some other error"), + f"Icon {ICON_PATH} not found when sending notification for device {TV_NAME}", + ), + ( + False, + None, + f"Error sending notification to device {TV_NAME}: Device is off and cannot be controlled", + ), ], ) -async def test_connection_errors( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, side_effect, error +async def test_errors( + hass: HomeAssistant, + client, + is_on: bool, + exception: Exception, + error_message: str, ) -> None: - """Test connection errors scenarios.""" + """Test error scenarios.""" await setup_webostv(hass) + client.is_on = is_on + assert hass.services.has_service("notify", SERVICE_NAME) - client.is_connected.return_value = False - client.connect.side_effect = side_effect - await hass.services.async_call( - NOTIFY_DOMAIN, - SERVICE_NAME, - { - ATTR_MESSAGE: MESSAGE, - 
ATTR_DATA: { - ATTR_ICON: ICON_PATH, + client.send_message.side_effect = exception + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + NOTIFY_DOMAIN, + SERVICE_NAME, + { + ATTR_MESSAGE: MESSAGE, + ATTR_DATA: { + ATTR_ICON: ICON_PATH, + }, }, - }, - blocking=True, - ) - assert client.mock_calls[0] == call.connect() - assert client.connect.call_count == 2 - client.send_message.assert_not_called() - assert error in caplog.text + blocking=True, + ) + + assert client.send_message.call_count == int(is_on) async def test_no_discovery_info( diff --git a/tests/components/webostv/test_trigger.py b/tests/components/webostv/test_trigger.py index d7eeae28ea3..c7decafff73 100644 --- a/tests/components/webostv/test_trigger.py +++ b/tests/components/webostv/test_trigger.py @@ -1,4 +1,4 @@ -"""The tests for WebOS TV automation triggers.""" +"""The tests for LG webOS TV automation triggers.""" from unittest.mock import patch @@ -118,10 +118,10 @@ async def test_webostv_turn_on_trigger_entity_id( assert service_calls[1].data["id"] == 0 -async def test_wrong_trigger_platform_type( +async def test_unknown_trigger_platform_type( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client ) -> None: - """Test wrong trigger platform type.""" + """Test unknown trigger platform type.""" await setup_webostv(hass) await async_setup_component( @@ -131,7 +131,7 @@ async def test_wrong_trigger_platform_type( automation.DOMAIN: [ { "trigger": { - "platform": "webostv.wrong_type", + "platform": "webostv.unknown", "entity_id": ENTITY_ID, }, "action": { @@ -146,10 +146,7 @@ async def test_wrong_trigger_platform_type( }, ) - assert ( - "ValueError: Unknown webOS Smart TV trigger platform webostv.wrong_type" - in caplog.text - ) + assert "Unknown trigger platform: webostv.unknown" in caplog.text async def test_trigger_invalid_entity_id( @@ -185,7 +182,4 @@ async def test_trigger_invalid_entity_id( }, ) - assert ( - f"ValueError: Entity {invalid_entity} is not a valid webostv entity" - in caplog.text - ) + assert f"Entity {invalid_entity} is not a valid {DOMAIN} entity" in caplog.text diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index 50620b20b8b..c302922fe25 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -39,7 +39,12 @@ def fixture_brand(request: pytest.FixtureRequest) -> tuple[str, Brand]: @pytest.fixture(name="mock_auth_api") def fixture_mock_auth_api(): """Set up Auth fixture.""" - with mock.patch("homeassistant.components.whirlpool.Auth") as mock_auth: + with ( + mock.patch("homeassistant.components.whirlpool.Auth") as mock_auth, + mock.patch( + "homeassistant.components.whirlpool.config_flow.Auth", new=mock_auth + ), + ): mock_auth.return_value.do_auth = AsyncMock() mock_auth.return_value.is_access_token_valid.return_value = True yield mock_auth @@ -48,9 +53,15 @@ def fixture_mock_auth_api(): @pytest.fixture(name="mock_appliances_manager_api") def fixture_mock_appliances_manager_api(): """Set up AppliancesManager fixture.""" - with mock.patch( - "homeassistant.components.whirlpool.AppliancesManager" - ) as mock_appliances_manager: + with ( + mock.patch( + "homeassistant.components.whirlpool.AppliancesManager" + ) as mock_appliances_manager, + mock.patch( + "homeassistant.components.whirlpool.config_flow.AppliancesManager", + new=mock_appliances_manager, + ), + ): mock_appliances_manager.return_value.fetch_appliances = AsyncMock() mock_appliances_manager.return_value.aircons 
= [ {"SAID": MOCK_SAID1, "NAME": "TestZone"}, @@ -81,9 +92,15 @@ def fixture_mock_appliances_manager_laundry_api(): @pytest.fixture(name="mock_backend_selector_api") def fixture_mock_backend_selector_api(): """Set up BackendSelector fixture.""" - with mock.patch( - "homeassistant.components.whirlpool.BackendSelector" - ) as mock_backend_selector: + with ( + mock.patch( + "homeassistant.components.whirlpool.BackendSelector" + ) as mock_backend_selector, + mock.patch( + "homeassistant.components.whirlpool.config_flow.BackendSelector", + new=mock_backend_selector, + ), + ): yield mock_backend_selector diff --git a/tests/components/whirlpool/test_config_flow.py b/tests/components/whirlpool/test_config_flow.py index 1240e1303e1..94a34c96e2c 100644 --- a/tests/components/whirlpool/test_config_flow.py +++ b/tests/components/whirlpool/test_config_flow.py @@ -1,9 +1,10 @@ """Test the Whirlpool Sixth Sense config flow.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch import aiohttp from aiohttp.client_exceptions import ClientConnectionError +import pytest from homeassistant import config_entries from homeassistant.components.whirlpool.const import CONF_BRAND, DOMAIN @@ -19,7 +20,10 @@ CONFIG_INPUT = { } -async def test_form(hass: HomeAssistant, region, brand) -> None: +@pytest.mark.usefixtures("mock_auth_api", "mock_appliances_manager_api") +async def test_form( + hass: HomeAssistant, region, brand, mock_backend_selector_api: MagicMock +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -28,28 +32,9 @@ async def test_form(hass: HomeAssistant, region, brand) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == config_entries.SOURCE_USER - with ( - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=True, - ), - patch( - "homeassistant.components.whirlpool.config_flow.BackendSelector" - ) as mock_backend_selector, - patch( - "homeassistant.components.whirlpool.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.aircons", - return_value=["test"], - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.fetch_appliances", - return_value=True, - ), - ): + with patch( + "homeassistant.components.whirlpool.async_setup_entry", return_value=True + ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, @@ -65,92 +50,99 @@ async def test_form(hass: HomeAssistant, region, brand) -> None: "brand": brand[0], } assert len(mock_setup_entry.mock_calls) == 1 - mock_backend_selector.assert_called_once_with(brand[1], region[1]) + mock_backend_selector_api.assert_called_once_with(brand[1], region[1]) -async def test_form_invalid_auth(hass: HomeAssistant, region, brand) -> None: +async def test_form_invalid_auth( + hass: HomeAssistant, region, brand, mock_auth_api: MagicMock +) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=False, - ), - ): - 
result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + + mock_auth_api.return_value.is_access_token_valid.return_value = False + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "invalid_auth"} -async def test_form_cannot_connect(hass: HomeAssistant, region, brand) -> None: +async def test_form_cannot_connect( + hass: HomeAssistant, + region, + brand, + mock_auth_api: MagicMock, +) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.whirlpool.config_flow.Auth.do_auth", - side_effect=aiohttp.ClientConnectionError, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT - | { - "region": region[0], - "brand": brand[0], - }, - ) + + mock_auth_api.return_value.do_auth.side_effect = aiohttp.ClientConnectionError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT + | { + "region": region[0], + "brand": brand[0], + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} -async def test_form_auth_timeout(hass: HomeAssistant, region, brand) -> None: +async def test_form_auth_timeout( + hass: HomeAssistant, + region, + brand, + mock_auth_api: MagicMock, +) -> None: """Test we handle auth timeout error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.whirlpool.config_flow.Auth.do_auth", - side_effect=TimeoutError, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT - | { - "region": region[0], - "brand": brand[0], - }, - ) + + mock_auth_api.return_value.do_auth.side_effect = TimeoutError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT + | { + "region": region[0], + "brand": brand[0], + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} -async def test_form_generic_auth_exception(hass: HomeAssistant, region, brand) -> None: +async def test_form_generic_auth_exception( + hass: HomeAssistant, + region, + brand, + mock_auth_api: MagicMock, +) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.whirlpool.config_flow.Auth.do_auth", - side_effect=Exception, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT - | { - "region": region[0], - "brand": brand[0], - }, - ) + + mock_auth_api.return_value.do_auth.side_effect = Exception + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT + | { + "region": region[0], + "brand": brand[0], + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} +@pytest.mark.usefixtures("mock_auth_api", "mock_appliances_manager_api") async def test_form_already_configured(hass: HomeAssistant, region, brand) -> None: """Test we handle cannot connect error.""" mock_entry = MockConfigEntry( @@ -167,36 
+159,24 @@ async def test_form_already_configured(hass: HomeAssistant, region, brand) -> No assert result["type"] is FlowResultType.FORM assert result["step_id"] == config_entries.SOURCE_USER - with ( - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=True, - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.aircons", - return_value=["test"], - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.fetch_appliances", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT - | { - "region": region[0], - "brand": brand[0], - }, - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT + | { + "region": region[0], + "brand": brand[0], + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" -async def test_no_appliances_flow(hass: HomeAssistant, region, brand) -> None: +@pytest.mark.usefixtures("mock_auth_api") +async def test_no_appliances_flow( + hass: HomeAssistant, region, brand, mock_appliances_manager_api: MagicMock +) -> None: """Test we get an error with no appliances.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -205,27 +185,19 @@ async def test_no_appliances_flow(hass: HomeAssistant, region, brand) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == config_entries.SOURCE_USER - with ( - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=True, - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.fetch_appliances", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) - await hass.async_block_till_done() + mock_appliances_manager_api.return_value.aircons = [] + mock_appliances_manager_api.return_value.washer_dryers = [] + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "no_appliances"} +@pytest.mark.usefixtures("mock_auth_api", "mock_appliances_manager_api") async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: """Test a successful reauth flow.""" mock_entry = MockConfigEntry( @@ -241,24 +213,8 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.whirlpool.async_setup_entry", - return_value=True, - ), - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=True, - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.aircons", - return_value=["test"], - ), - patch( - "homeassistant.components.whirlpool.config_flow.AppliancesManager.fetch_appliances", - return_value=True, - ), + with patch( + 
"homeassistant.components.whirlpool.async_setup_entry", return_value=True ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -276,7 +232,10 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: } -async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> None: +@pytest.mark.usefixtures("mock_appliances_manager_api") +async def test_reauth_flow_auth_error( + hass: HomeAssistant, region, brand, mock_auth_api: MagicMock +) -> None: """Test an authorization error reauth flow.""" mock_entry = MockConfigEntry( @@ -290,16 +249,10 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.whirlpool.async_setup_entry", - return_value=True, - ), - patch("homeassistant.components.whirlpool.config_flow.Auth.do_auth"), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=False, - ), + + mock_auth_api.return_value.is_access_token_valid.return_value = False + with patch( + "homeassistant.components.whirlpool.async_setup_entry", return_value=True ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -311,8 +264,9 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non assert result2["errors"] == {"base": "invalid_auth"} +@pytest.mark.usefixtures("mock_appliances_manager_api") async def test_reauth_flow_connnection_error( - hass: HomeAssistant, region, brand + hass: HomeAssistant, region, brand, mock_auth_api: MagicMock ) -> None: """Test a connection error reauth flow.""" @@ -329,25 +283,14 @@ async def test_reauth_flow_connnection_error( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.whirlpool.async_setup_entry", - return_value=True, - ), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.do_auth", - side_effect=ClientConnectionError, - ), - patch( - "homeassistant.components.whirlpool.config_flow.Auth.is_access_token_valid", - return_value=False, - ), + mock_auth_api.return_value.do_auth.side_effect = ClientConnectionError + with patch( + "homeassistant.components.whirlpool.async_setup_entry", return_value=True ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_PASSWORD: "new-password", CONF_BRAND: brand[0]}, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/youless/snapshots/test_sensor.ambr b/tests/components/youless/snapshots/test_sensor.ambr index bcfd0139e5c..3424a264f48 100644 --- a/tests/components/youless/snapshots/test_sensor.ambr +++ b/tests/components/youless/snapshots/test_sensor.ambr @@ -24,7 +24,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:transmission-tower-import', 'original_name': 'Energy delivery high', 'platform': 'youless', 'previous_unique_id': None, @@ -39,6 +39,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Energy delivery high', + 'icon': 'mdi:transmission-tower-import', 'state_class': , 'unit_of_measurement': , }), @@ -75,7 +76,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:transmission-tower-import', 'original_name': 'Energy delivery 
low', 'platform': 'youless', 'previous_unique_id': None, @@ -90,6 +91,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Energy delivery low', + 'icon': 'mdi:transmission-tower-import', 'state_class': , 'unit_of_measurement': , }), @@ -126,7 +128,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:transmission-tower-export', 'original_name': 'Energy high', 'platform': 'youless', 'previous_unique_id': None, @@ -141,6 +143,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Energy high', + 'icon': 'mdi:transmission-tower-export', 'state_class': , 'unit_of_measurement': , }), @@ -177,7 +180,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:transmission-tower-export', 'original_name': 'Energy low', 'platform': 'youless', 'previous_unique_id': None, @@ -192,6 +195,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Energy low', + 'icon': 'mdi:transmission-tower-export', 'state_class': , 'unit_of_measurement': , }), @@ -228,7 +232,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:transmission-tower-export', 'original_name': 'Energy total', 'platform': 'youless', 'previous_unique_id': None, @@ -243,6 +247,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Energy total', + 'icon': 'mdi:transmission-tower-export', 'state_class': , 'unit_of_measurement': , }), @@ -279,7 +284,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:meter-electric', 'original_name': 'Extra total', 'platform': 'youless', 'previous_unique_id': None, @@ -294,6 +299,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Extra total', + 'icon': 'mdi:meter-electric', 'state_class': , 'unit_of_measurement': , }), @@ -330,7 +336,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': None, + 'original_icon': 'mdi:lightning-bolt', 'original_name': 'Extra usage', 'platform': 'youless', 'previous_unique_id': None, @@ -345,6 +351,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Extra usage', + 'icon': 'mdi:lightning-bolt', 'state_class': , 'unit_of_measurement': , }), @@ -456,7 +463,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_1_power-entry] @@ -507,7 +514,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_1_voltage-entry] @@ -558,7 +565,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_2_current-entry] @@ -609,7 +616,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_2_power-entry] @@ -660,7 +667,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_2_voltage-entry] @@ -711,7 +718,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'unknown', }) # --- # name: test_sensors[sensor.phase_3_current-entry] @@ -762,7 +769,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 
diff --git a/tests/conftest.py b/tests/conftest.py
index a64543337b9..de627925941 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -102,6 +102,7 @@ from .typing import (
     MqttMockHAClient,
     MqttMockHAClientGenerator,
     MqttMockPahoClient,
+    RecorderInstanceContextManager,
     RecorderInstanceGenerator,
     WebSocketGenerator,
 )
@@ -1536,7 +1537,7 @@ async def async_test_recorder(
     enable_migrate_event_type_ids: bool,
     enable_migrate_entity_ids: bool,
     enable_migrate_event_ids: bool,
-) -> AsyncGenerator[RecorderInstanceGenerator]:
+) -> AsyncGenerator[RecorderInstanceContextManager]:
     """Yield context manager to setup recorder instance."""
     # pylint: disable-next=import-outside-toplevel
     from homeassistant.components import recorder
@@ -1702,7 +1703,7 @@ async def async_test_recorder(
 
 @pytest.fixture
 async def async_setup_recorder_instance(
-    async_test_recorder: RecorderInstanceGenerator,
+    async_test_recorder: RecorderInstanceContextManager,
 ) -> AsyncGenerator[RecorderInstanceGenerator]:
     """Yield callable to setup recorder instance."""
 
@@ -1715,7 +1716,7 @@ async def async_setup_recorder_instance(
             expected_setup_result: bool = True,
             wait_recorder: bool = True,
             wait_recorder_setup: bool = True,
-        ) -> AsyncGenerator[recorder.Recorder]:
+        ) -> recorder.Recorder:
             """Set up and return recorder instance."""
 
             return await stack.enter_async_context(
@@ -1734,7 +1735,7 @@ async def async_setup_recorder_instance(
 @pytest.fixture
 async def recorder_mock(
     recorder_config: dict[str, Any] | None,
-    async_test_recorder: RecorderInstanceGenerator,
+    async_test_recorder: RecorderInstanceContextManager,
     hass: HomeAssistant,
 ) -> AsyncGenerator[recorder.Recorder]:
     """Fixture with in-memory recorder."""
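These conftest changes only tighten the annotations: `async_test_recorder` yields a callable that returns an async context manager, while `async_setup_recorder_instance` yields a callable whose coroutine resolves to a `Recorder`. A rough usage sketch under that assumption (the test names and call arguments below are made up, though `hass` is how current Home Assistant tests invoke these fixtures):

```python
# Sketch only: how a test would consume each recorder fixture shape.
from homeassistant.core import HomeAssistant

from tests.typing import RecorderInstanceContextManager, RecorderInstanceGenerator


async def test_uses_context_manager(
    hass: HomeAssistant, async_test_recorder: RecorderInstanceContextManager
) -> None:
    """Run the recorder only for the duration of the block."""
    async with async_test_recorder(hass) as instance:
        assert instance is not None


async def test_uses_generator(
    hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator
) -> None:
    """Await the helper and get a running Recorder back."""
    instance = await async_setup_recorder_instance(hass)
    assert instance is not None
```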
diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py
index 5348348bb0d..57e151ba8eb 100644
--- a/tests/helpers/test_llm.py
+++ b/tests/helpers/test_llm.py
@@ -1,15 +1,17 @@
 """Tests for the llm helpers."""
 
+from datetime import timedelta
 from decimal import Decimal
 from unittest.mock import patch
 
 import pytest
 import voluptuous as vol
 
+from homeassistant.components import calendar
 from homeassistant.components.homeassistant.exposed_entities import async_expose_entity
 from homeassistant.components.intent import async_register_timer_handler
 from homeassistant.components.script.config import ScriptConfig
-from homeassistant.core import Context, HomeAssistant, State
+from homeassistant.core import Context, HomeAssistant, State, SupportsResponse
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import (
     area_registry as ar,
@@ -22,8 +24,9 @@ from homeassistant.helpers import (
     selector,
 )
 from homeassistant.setup import async_setup_component
+from homeassistant.util import dt as dt_util
 
-from tests.common import MockConfigEntry
+from tests.common import MockConfigEntry, async_mock_service
 
 
 @pytest.fixture
@@ -1162,3 +1165,96 @@ async def test_selector_serializer(
     assert selector_serializer(selector.FileSelector({"accept": ".txt"})) == {
         "type": "string"
     }
+
+
+async def test_calendar_get_events_tool(hass: HomeAssistant) -> None:
+    """Test the calendar get events tool."""
+    assert await async_setup_component(hass, "homeassistant", {})
+    hass.states.async_set("calendar.test_calendar", "on", {"friendly_name": "Test"})
+    async_expose_entity(hass, "conversation", "calendar.test_calendar", True)
+    context = Context()
+    llm_context = llm.LLMContext(
+        platform="test_platform",
+        context=context,
+        user_prompt="test_text",
+        language="*",
+        assistant="conversation",
+        device_id=None,
+    )
+    api = await llm.async_get_api(hass, "assist", llm_context)
+    assert [tool for tool in api.tools if tool.name == "calendar_get_events"]
+
+    calls = async_mock_service(
+        hass,
+        domain=calendar.DOMAIN,
+        service=calendar.SERVICE_GET_EVENTS,
+        schema=calendar.SERVICE_GET_EVENTS_SCHEMA,
+        response={
+            "calendar.test_calendar": {
+                "events": [
+                    {
+                        "start": "2025-09-17",
+                        "end": "2025-09-18",
+                        "summary": "Home Assistant 12th birthday",
+                        "description": "",
+                    },
+                    {
+                        "start": "2025-09-17T14:00:00-05:00",
+                        "end": "2025-09-18T15:00:00-05:00",
+                        "summary": "Champagne",
+                        "description": "",
+                    },
+                ]
+            }
+        },
+        supports_response=SupportsResponse.ONLY,
+    )
+
+    tool_input = llm.ToolInput(
+        tool_name="calendar_get_events",
+        tool_args={"calendar": "calendar.test_calendar", "range": "today"},
+    )
+    now = dt_util.now()
+    with patch("homeassistant.util.dt.now", return_value=now):
+        response = await api.async_call_tool(tool_input)
+
+    assert len(calls) == 1
+    call = calls[0]
+    assert call.domain == calendar.DOMAIN
+    assert call.service == calendar.SERVICE_GET_EVENTS
+    assert call.data == {
+        "entity_id": ["calendar.test_calendar"],
+        "start_date_time": now,
+        "end_date_time": dt_util.start_of_local_day() + timedelta(days=1),
+    }
+
+    assert response == {
+        "success": True,
+        "result": [
+            {
+                "start": "2025-09-17",
+                "end": "2025-09-18",
+                "summary": "Home Assistant 12th birthday",
+                "description": "",
+                "all_day": True,
+            },
+            {
+                "start": "2025-09-17T14:00:00-05:00",
+                "end": "2025-09-18T15:00:00-05:00",
+                "summary": "Champagne",
+                "description": "",
+            },
+        ],
+    }
+
+    tool_input.tool_args["range"] = "week"
+    with patch("homeassistant.util.dt.now", return_value=now):
+        response = await api.async_call_tool(tool_input)
+
+    assert len(calls) == 2
+    call = calls[1]
+    assert call.data == {
+        "entity_id": ["calendar.test_calendar"],
+        "start_date_time": now,
+        "end_date_time": dt_util.start_of_local_day() + timedelta(days=7),
+    }
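The new test pins down how the `calendar_get_events` tool translates its `range` argument into the `calendar.get_events` service call window: the query starts at "now" and ends at the start of the next local day for `today`, or seven days out for `week`. A minimal sketch of that arithmetic, mirroring only what the assertions above check (the helper name is hypothetical, not part of the llm helper API):

```python
# Hypothetical helper reproducing the range-to-window mapping asserted in the test.
from datetime import datetime, timedelta

from homeassistant.util import dt as dt_util


def _event_window(range_: str) -> tuple[datetime, datetime]:
    """Return (start, end) for a 'today' or 'week' calendar query."""
    start = dt_util.now()
    days = 1 if range_ == "today" else 7
    end = dt_util.start_of_local_day() + timedelta(days=days)
    return start, end
```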
diff --git a/tests/typing.py b/tests/typing.py
index 7b61949a9c4..5bcb1a01104 100644
--- a/tests/typing.py
+++ b/tests/typing.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 from collections.abc import Callable, Coroutine
+from contextlib import AbstractAsyncContextManager
 from typing import TYPE_CHECKING, Any
 from unittest.mock import MagicMock
 
@@ -30,6 +31,10 @@ type MqttMockHAClient = MagicMock
 """MagicMock for `homeassistant.components.mqtt.MQTT`."""
 type MqttMockHAClientGenerator = Callable[..., Coroutine[Any, Any, MqttMockHAClient]]
 """MagicMock generator for `homeassistant.components.mqtt.MQTT`."""
+type RecorderInstanceContextManager = Callable[
+    ..., AbstractAsyncContextManager[Recorder]
+]
+"""ContextManager for `homeassistant.components.recorder.Recorder`."""
 type RecorderInstanceGenerator = Callable[..., Coroutine[Any, Any, Recorder]]
 """Instance generator for `homeassistant.components.recorder.Recorder`."""
 type WebSocketGenerator = Callable[..., Coroutine[Any, Any, MockHAClientWebSocket]]
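For reference, a minimal, self-contained illustration of what the two recorder aliases describe structurally: a callable whose result is an async context manager versus a callable whose result is a coroutine. Every name below is a stand-in, not part of the test suite.

```python
# Stand-in illustration only; `Recorder` here is a dummy placeholder class.
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager


class Recorder:
    """Placeholder for homeassistant.components.recorder.Recorder."""


@asynccontextmanager
async def recorder_cm() -> AsyncIterator[Recorder]:
    # Calling this returns an AbstractAsyncContextManager[Recorder]:
    # the shape RecorderInstanceContextManager describes.
    yield Recorder()


async def recorder_coro() -> Recorder:
    # Awaiting this returns a Recorder directly:
    # the shape RecorderInstanceGenerator describes.
    return Recorder()
```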